Compare commits


1 commit

Author: Andrei Isvoran
SHA1: b562ffdf10
Message: RED-9942 - Don't skip images on file
Date: 2024-08-23 13:03:25 +03:00
177 changed files with 6096 additions and 4441 deletions

View File

@@ -21,6 +21,5 @@ deploy:
   dotenv: version.env
   rules:
     - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
-    - if: $CI_COMMIT_BRANCH =~ /^feature/ && $CI_COMMIT_TAG == ""
     - if: $CI_COMMIT_BRANCH =~ /^release/
     - if: $CI_COMMIT_TAG

View File

@@ -8,8 +8,6 @@ plugins {
 group = "com.knecon.fforesight"
-val documentVersion by rootProject.extra { "4.433.0" }
 java.sourceCompatibility = JavaVersion.VERSION_17
 java.targetCompatibility = JavaVersion.VERSION_17
@@ -53,10 +51,6 @@ allprojects {
         }
     }
-    pmd {
-        setConsoleOutput(true)
-    }
     publishing {
         publications {
             create<MavenPublication>(name) {

View File

@@ -0,0 +1,28 @@
package com.knecon.fforesight.service.layoutparser.internal.api.data.redaction;
import java.io.Serializable;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.experimental.FieldDefaults;
@Data
@Builder
@AllArgsConstructor
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
@Schema(description = "Object containing the complete document layout parsing information. It is split into 4 categories, structure, text, positions and pages: " + "The document tree structure of SemanticNodes such as Section, Paragraph, Headline, etc. " + "The text, which is stored as separate blocks of data. " + "The text positions, which are also stored as separate blocks. The Blocks are equal to the text blocks in length and order. " + "The page information.")
public class DocumentData implements Serializable {
@Schema(description = "Contains information about the document's pages.")
DocumentPage[] documentPages;
@Schema(description = "Contains information about the document's text.")
DocumentTextData[] documentTextData;
@Schema(description = "Contains information about the document's text positions.")
DocumentPositionData[] documentPositions;
@Schema(description = "Contains information about the document's semantic structure.")
DocumentStructure documentStructure;
}
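The four arrays above are designed to be joined: text blocks and position blocks share the same id, and both reference pages by number. As a quick illustration (not part of the diff; the example class name is invented and only the Lombok-generated getters implied above are assumed), a consumer could pair them like this:

    // Illustration only: joins the text blocks with their position blocks by id,
    // assuming this class sits in the same package as DocumentData.
    import java.util.Arrays;
    import java.util.Map;
    import java.util.function.Function;
    import java.util.stream.Collectors;

    class DocumentDataJoinExample {

        static void printBlockSummary(DocumentData data) {
            // Index position blocks by their id so each text block can look up its glyph boxes.
            Map<Long, DocumentPositionData> positionsById = Arrays.stream(data.getDocumentPositions())
                    .collect(Collectors.toMap(DocumentPositionData::getId, Function.identity()));

            for (DocumentTextData text : data.getDocumentTextData()) {
                DocumentPositionData positions = positionsById.get(text.getId());
                System.out.printf("page %d, block %d: %d glyph boxes%n",
                        text.getPage(), text.getId(),
                        positions == null ? 0 : positions.getPositions().length);
            }
        }
    }

The same id-based join applies when resolving the atomicBlockIds of the structure tree back to text and positions.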

View File

@@ -0,0 +1,30 @@
package com.knecon.fforesight.service.layoutparser.internal.api.data.redaction;
import java.io.Serializable;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.FieldDefaults;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@FieldDefaults(level = AccessLevel.PRIVATE)
@Schema(description = "Object containing information about the document's pages.")
public class DocumentPage implements Serializable {
@Schema(description = "The page number, starting with 1.")
int number;
@Schema(description = "The page height in PDF user units.", example = "792")
int height;
@Schema(description = "The page width in PDF user units.", example = "694")
int width;
@Schema(description = "The page rotation as specified by the PDF.", example = "90", allowableValues = {"0", "90", "180", "270"})
int rotation;
}
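Since PDF user space defaults to 72 units per inch and a rotation of 90 or 270 degrees swaps the visible sides, a consumer might derive physical size and orientation as sketched below (illustrative only, not part of the diff; the class name is invented):

    // Illustration only: PDF user space defaults to 72 units per inch,
    // and a rotation of 90 or 270 degrees swaps the visual width and height.
    class DocumentPageExample {

        static double widthInInches(DocumentPage page) {
            return page.getWidth() / 72.0;
        }

        static boolean isLandscape(DocumentPage page) {
            boolean swapped = page.getRotation() == 90 || page.getRotation() == 270;
            int visualWidth = swapped ? page.getHeight() : page.getWidth();
            int visualHeight = swapped ? page.getWidth() : page.getHeight();
            return visualWidth > visualHeight;
        }
    }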

View File

@@ -0,0 +1,28 @@
package com.knecon.fforesight.service.layoutparser.internal.api.data.redaction;
import java.io.Serializable;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.FieldDefaults;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@FieldDefaults(level = AccessLevel.PRIVATE)
@Schema(description = "Object containing text positional information of a specific text block. A document is split into multiple text blocks, which are supposed to be read in order. Every text block can only occur on a single page.")
public class DocumentPositionData implements Serializable {
@Schema(description = "Identifier of the text block.")
Long id;
@Schema(description = "For each string coordinate in the search text of the text block, the array contains an entry relating the string coordinate to the position coordinate. This is required due to the text and position coordinates not being equal.")
int[] stringIdxToPositionIdx;
@Schema(description = "The bounding box for each glyph as a rectangle. This matrix is of size (n,4), where n is the number of glyphs in the text block. The second dimension specifies the rectangle with the value x, y, width, height, with x, y specifying the lower left corner. In order to access this information, the stringIdxToPositionIdx array must be used to transform the coordinates.")
float[][] positions;
}
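The schema above implies a two-step lookup: translate a string index into a position index, then read the glyph rectangle. A minimal sketch (illustrative, not part of the diff; the helper name is invented):

    import java.awt.geom.Rectangle2D;

    // Illustration only: the two-step lookup from a string index in the search text
    // to the bounding rectangle of the glyph that renders it.
    class GlyphBoxLookupExample {

        static Rectangle2D.Float glyphBoxAt(DocumentPositionData block, int stringIdx) {
            int positionIdx = block.getStringIdxToPositionIdx()[stringIdx];
            float[] box = block.getPositions()[positionIdx]; // x, y, width, height; x,y is the lower-left corner
            return new Rectangle2D.Float(box[0], box[1], box[2], box[3]);
        }
    }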

View File

@@ -0,0 +1,172 @@
package com.knecon.fforesight.service.layoutparser.internal.api.data.redaction;
import java.awt.geom.Rectangle2D;
import java.io.Serializable;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Stream;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.FieldDefaults;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@FieldDefaults(level = AccessLevel.PRIVATE)
@Schema(description = "Object containing information about the parsed tree structure of the SemanticNodes, such as Section, Paragraph, Headline etc inside of the document.")
public class DocumentStructure implements Serializable {
@Schema(description = "The root EntryData represents the Document.")
EntryData root;
@Schema(description = "Object containing the extra field names, a table has in its properties field.")
public static class TableProperties implements Serializable {
public static final String NUMBER_OF_ROWS = "numberOfRows";
public static final String NUMBER_OF_COLS = "numberOfCols";
}
@Schema(description = "Object containing the extra field names, an Image has in its properties field.")
public static class ImageProperties implements Serializable {
public static final String TRANSPARENT = "transparent";
public static final String IMAGE_TYPE = "imageType";
public static final String POSITION = "position";
public static final String ID = "id";
public static final String REPRESENTATION_HASH = "representationHash";
}
@Schema(description = "Object containing the extra field names, a table cell has in its properties field.")
public static class TableCellProperties implements Serializable {
public static final String B_BOX = "bBox";
public static final String ROW = "row";
public static final String COL = "col";
public static final String HEADER = "header";
}
@Schema(description = "Object containing the extra field names, a duplicate paragraph has in its properties field.")
public static class DuplicateParagraphProperties implements Serializable {
public static final String UNSORTED_TEXTBLOCK_ID = "utbid";
}
public static final String RECTANGLE_DELIMITER = ";";
public static Rectangle2D parseRectangle2D(String bBox) {
List<Float> floats = Arrays.stream(bBox.split(RECTANGLE_DELIMITER))
.map(Float::parseFloat)
.toList();
return new Rectangle2D.Float(floats.get(0), floats.get(1), floats.get(2), floats.get(3));
}
public static double[] parseRepresentationVector(String representationHash) {
String[] stringArray = representationHash.split("[,\\s]+");
double[] doubleArray = new double[stringArray.length];
for (int i = 0; i < stringArray.length; i++) {
doubleArray[i] = Double.parseDouble(stringArray[i]);
}
return doubleArray;
}
public EntryData get(List<Integer> tocId) {
if (tocId.isEmpty()) {
return root;
}
EntryData entry = root.children.get(tocId.get(0));
for (int id : tocId.subList(1, tocId.size())) {
entry = entry.children.get(id);
}
return entry;
}
public Stream<EntryData> streamAllEntries() {
return Stream.concat(Stream.of(root), root.children.stream())
.flatMap(DocumentStructure::flatten);
}
public String toString() {
return String.join("\n",
streamAllEntries().map(EntryData::toString)
.toList());
}
private static Stream<EntryData> flatten(EntryData entry) {
return Stream.concat(Stream.of(entry),
entry.children.stream()
.flatMap(DocumentStructure::flatten));
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@FieldDefaults(level = AccessLevel.PRIVATE)
@Schema(description = "Object containing information of a SemanticNode and also structuring the layout with children.")
public static class EntryData implements Serializable {
@Schema(description = "Type of the semantic node.", allowableValues = {"DOCUMENT", "SECTION", "PARAGRAPH", "HEADLINE", "TABLE", "TABLE_CELL", "HEADER", "FOOTER", "IMAGE"})
NodeType type;
@Schema(description = "Specifies the position in the parsed tree structure.", example = "[1, 0, 2]")
int[] treeId;
@Schema(description = "Specifies the text block IDs associated with this semantic node. The value should be joined with the DocumentTextData/DocumentPositionData. Is empty, if no text block is directly associated with this semantic node. Only Paragraph, Headline, Header or Footer is directly associated with a text block.", example = "[1]")
Long[] atomicBlockIds;
@Schema(description = "Specifies the pages this semantic node appears on. The value should be joined with the PageData.", example = "[1, 2, 3]")
Long[] pageNumbers;
@Schema(description = "Some semantic nodes have additional information, this information is stored in this Map. The extra fields are specified by the Properties subclasses.", example = "For a Table: {\"numberOfRows\": 3, \"numberOfCols\": 4}")
Map<String, String> properties;
@Schema(description = "All child Entries of this Entry.", example = "[1, 2, 3]")
List<EntryData> children;
@Schema(description = "Describes the origin of the semantic node", example = "[ALGORITHM]")
Set<LayoutEngine> engines;
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("[");
for (int i : treeId) {
sb.append(i);
sb.append(",");
}
sb.delete(sb.length() - 1, sb.length());
sb.append("]: ");
sb.append(type);
sb.append(" atbs = ");
sb.append(atomicBlockIds.length);
return sb.toString();
}
}
}
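To make the tree traversal and the properties map concrete, here is an illustrative walk (not part of the diff; the class name is invented, and it assumes only the public API shown above) that lists table cells and decodes their bounding boxes with parseRectangle2D:

    import java.awt.geom.Rectangle2D;

    // Illustration only: walks the structure tree and decodes table-cell bounding boxes.
    class DocumentStructureExample {

        static void printTableCells(DocumentStructure structure) {
            structure.streamAllEntries()
                    .filter(entry -> entry.getType() == NodeType.TABLE_CELL)
                    .forEach(cell -> {
                        if (cell.getProperties() == null) {
                            return; // some nodes carry no extra properties
                        }
                        String bBox = cell.getProperties().get(DocumentStructure.TableCellProperties.B_BOX);
                        if (bBox != null) {
                            Rectangle2D box = DocumentStructure.parseRectangle2D(bBox);
                            System.out.printf("cell row=%s col=%s box=%s%n",
                                    cell.getProperties().get(DocumentStructure.TableCellProperties.ROW),
                                    cell.getProperties().get(DocumentStructure.TableCellProperties.COL),
                                    box);
                        }
                    });
        }
    }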

View File

@@ -0,0 +1,36 @@
package com.knecon.fforesight.service.layoutparser.internal.api.data.redaction;
import java.io.Serializable;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.FieldDefaults;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@FieldDefaults(level = AccessLevel.PRIVATE)
@Schema(description = "Object containing text information of a specific text block. A document is split into multiple text blocks, which are supposed to be read in order. Every text block can only occur on a single page.")
public class DocumentTextData implements Serializable {
@Schema(description = "Identifier of the text block.")
Long id;
@Schema(description = "The page the text block occurs on.")
Long page;
@Schema(description = "The text the text block.")
String searchText;
@Schema(description = "Each text block is assigned a number on a page, starting from 0.")
int numberOnPage;
@Schema(description = "The text blocks are ordered, this number represents the start of the text block as a string offset.")
int start;
@Schema(description = "The text blocks are ordered, this number represents the end of the text block as a string offset.")
int end;
@Schema(description = "The line breaks in the text of this semantic node in string offsets. They are exclusive end. At the end of each semantic node there is an implicit linebreak.", example = "[5, 10]")
int[] lineBreaks;
}
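The lineBreaks offsets are exclusive ends with an implicit final break; assuming they are relative to searchText (the schema does not state this explicitly), a block could be split into lines as sketched here (illustrative, not part of the diff; the class name is invented):

    import java.util.ArrayList;
    import java.util.List;

    // Illustration only: splits a block's search text into lines using the
    // exclusive-end lineBreaks offsets, assuming they are relative to searchText.
    class LineSplitExample {

        static List<String> lines(DocumentTextData block) {
            List<String> result = new ArrayList<>();
            String text = block.getSearchText();
            int lineStart = 0;
            for (int lineEnd : block.getLineBreaks()) {
                result.add(text.substring(lineStart, lineEnd));
                lineStart = lineEnd;
            }
            if (lineStart < text.length()) {
                result.add(text.substring(lineStart)); // the implicit final line break at the end of the block
            }
            return result;
        }
    }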

View File

@@ -0,0 +1,7 @@
package com.knecon.fforesight.service.layoutparser.internal.api.data.redaction;
public enum LayoutEngine {
ALGORITHM,
AI,
OUTLINE
}

View File

@@ -0,0 +1,23 @@
package com.knecon.fforesight.service.layoutparser.internal.api.data.redaction;
import java.io.Serializable;
import java.util.Locale;
public enum NodeType implements Serializable {
DOCUMENT,
SECTION,
SUPER_SECTION,
HEADLINE,
PARAGRAPH,
TABLE,
TABLE_CELL,
IMAGE,
HEADER,
FOOTER;
public String toString() {
return this.name().charAt(0) + this.name().substring(1).toLowerCase(Locale.ROOT);
}
}
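For reference, the toString override above produces capitalized, lower-cased labels; a tiny illustration (not part of the diff):

    // Illustration only: the override turns enum names into capitalized, lower-cased labels.
    class NodeTypeExample {
        public static void main(String[] args) {
            System.out.println(NodeType.DOCUMENT);   // prints "Document"
            System.out.println(NodeType.TABLE_CELL); // prints "Table_cell"
        }
    }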

View File

@@ -21,14 +21,5 @@ public class SimplifiedText {
     @Schema(description = "A List of simplified Sections, which contains almost exclusively text.")
     @Builder.Default
     private List<SimplifiedSectionText> sectionTexts = new ArrayList<>();
-    @Schema(description = "A list of the main section numbers ")
-    @Builder.Default
-    private List<String> mainSectionNumbers = new ArrayList<>();
-    @Schema(description = "A list of the header section numbers ")
-    @Builder.Default
-    private List<String> headerSectionNumbers = new ArrayList<>();
-    @Schema(description = "A list of the footer section numbers ")
-    @Builder.Default
-    private List<String> footerSectionNumbers = new ArrayList<>();
 }

View File

@@ -8,20 +8,13 @@ import lombok.Builder;
 @Builder
 @Schema(description = "Object containing information about the layout parsing.")
 public record LayoutParsingFinishedEvent(
-        @Schema(description = "General purpose identifier. It is returned exactly the same way it is inserted with the LayoutParsingRequest.") //
-        Map<String, String> identifier,
-
-        @Schema(description = "The duration of a single layout parsing in ms.") //
-        long duration,
-
-        @Schema(description = "The number of pages of the parsed document.") //
-        int numberOfPages,
-
-        @Schema(description = "A general message. It contains some information useful for a developer, like the paths where the files are stored. Not meant to be machine readable.") //
-        String message,
-
-        @Schema(description = "The app version of the layout parser.") //
-        String layoutParserVersion
-) {
+        @Schema(description = "General purpose identifier. It is returned exactly the same way it is inserted with the LayoutParsingRequest.")
+        Map<String, String> identifier,//
+        @Schema(description = "The duration of a single layout parsing in ms.")
+        long duration,//
+        @Schema(description = "The number of pages of the parsed document.")
+        int numberOfPages,//
+        @Schema(description = "A general message. It contains some information useful for a developer, like the paths where the files are stored. Not meant to be machine readable.")
+        String message) {
 }

View File

@@ -2,9 +2,6 @@ package com.knecon.fforesight.service.layoutparser.internal.api.queue;
 public class LayoutParsingQueueNames {
-    public static final String LAYOUT_PARSING_REQUEST_QUEUE_PREFIX = "layout_parsing_request";
-    public static final String LAYOUT_PARSING_REQUEST_EXCHANGE = "layout_parsing_request_exchange";
-    public static final String LAYOUT_PARSING_RESPONSE_QUEUE_PREFIX = "layout_parsing_response";
-    public static final String LAYOUT_PARSING_RESPONSE_EXCHANGE = "layout_parsing_response_exchange";
-    public static final String LAYOUT_PARSING_DLQ = "layout_parsing_error";
+    public static final String LAYOUT_PARSING_REQUEST_QUEUE = "layout_parsing_request_queue";
+    public static final String LAYOUT_PARSING_FINISHED_EVENT_QUEUE = "layout_parsing_response_queue";
 }
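For context, a consumer of the two remaining constants might declare the queues with Spring AMQP roughly as follows; this is purely illustrative, since the diff does not show how the constants are wired, and the configuration class name is invented:

    // Illustration only: hypothetical Spring AMQP queue declarations using the new constants.
    import org.springframework.amqp.core.Queue;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;

    @Configuration
    class LayoutParsingQueueConfigExample {

        @Bean
        Queue layoutParsingRequestQueue() {
            return new Queue(LayoutParsingQueueNames.LAYOUT_PARSING_REQUEST_QUEUE, true); // durable
        }

        @Bean
        Queue layoutParsingFinishedEventQueue() {
            return new Queue(LayoutParsingQueueNames.LAYOUT_PARSING_FINISHED_EVENT_QUEUE, true);
        }
    }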

View File

@@ -8,20 +8,16 @@ description = "layoutparser-service-processor"
 val jacksonVersion = "2.15.2"
 val pdfBoxVersion = "3.0.0"
 dependencies {
     implementation(project(":layoutparser-service-internal-api"))
     implementation(project(":viewer-doc-processor"))
-    implementation("com.knecon.fforesight:document:${rootProject.extra.get("documentVersion")}")
-    implementation("com.iqser.red.service:persistence-service-shared-api-v1:2.564.0-RED9010.0") {
+    implementation("com.iqser.red.service:persistence-service-shared-api-v1:2.144.0") {
         exclude("org.springframework.boot", "spring-boot-starter-security")
         exclude("org.springframework.boot", "spring-boot-starter-validation")
     }
-    implementation("com.knecon.fforesight:tenant-commons:0.30.0") {
-        exclude("com.iqser.red.commons", "storage-commons")
-    }
-    implementation("com.iqser.red.commons:storage-commons:2.50.0")
+    implementation("com.knecon.fforesight:tenant-commons:0.21.0")
+    implementation("com.iqser.red.commons:storage-commons:2.45.0")
     implementation("org.apache.pdfbox:pdfbox:${pdfBoxVersion}")
     implementation("org.apache.pdfbox:pdfbox-tools:${pdfBoxVersion}")
@@ -29,12 +25,9 @@ dependencies {
     implementation("com.fasterxml.jackson.datatype:jackson-datatype-jsr310:${jacksonVersion}")
     implementation("org.springframework.boot:spring-boot-starter-web:3.1.3")
     implementation("org.jgrapht:jgrapht-core:1.5.2")
-    implementation("org.apache.pdfbox:jbig2-imageio:3.0.4")
-    implementation("com.github.jai-imageio:jai-imageio-core:1.4.0")
-    implementation("com.github.jai-imageio:jai-imageio-jpeg2000:1.4.0")
     implementation("org.tinspin:tinspin-indexes:2.1.3")
     implementation("org.commonmark:commonmark:0.22.0")
     implementation("org.commonmark:commonmark-ext-gfm-tables:0.22.0")
     implementation("com.pdftron:PDFNet:10.11.0")
-    implementation("org.apache.commons:commons-text:1.12.0")
 }

View File

@@ -2,13 +2,12 @@ package com.knecon.fforesight.service.layoutparser.processor;
 import static java.lang.String.format;
-import java.awt.geom.AffineTransform;
+import java.awt.geom.Point2D;
 import java.awt.geom.Rectangle2D;
 import java.io.File;
 import java.io.IOException;
 import java.nio.file.Files;
 import java.util.ArrayList;
-import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -20,35 +19,28 @@ import org.apache.pdfbox.pdmodel.PDDocument;
 import org.apache.pdfbox.pdmodel.PDPage;
 import org.apache.pdfbox.pdmodel.common.PDRectangle;
 import org.apache.pdfbox.pdmodel.documentinterchange.markedcontent.PDMarkedContent;
-import org.springframework.beans.factory.annotation.Value;
 import org.springframework.stereotype.Service;
-import com.iqser.red.service.redaction.v1.server.mapper.DocumentDataMapper;
-import com.iqser.red.service.redaction.v1.server.model.document.nodes.ImageType;
-import com.iqser.red.service.redaction.v1.server.model.document.nodes.NodeType;
+import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.NodeType;
 import com.knecon.fforesight.service.layoutparser.internal.api.queue.LayoutParsingFinishedEvent;
 import com.knecon.fforesight.service.layoutparser.internal.api.queue.LayoutParsingRequest;
 import com.knecon.fforesight.service.layoutparser.internal.api.queue.LayoutParsingType;
-import com.knecon.fforesight.service.layoutparser.processor.docstrum.model.Character;
-import com.knecon.fforesight.service.layoutparser.processor.model.DocumentWithVisualization;
-import com.knecon.fforesight.service.layoutparser.processor.model.table.Ruling;
-import com.knecon.fforesight.service.layoutparser.processor.model.text.RedTextPosition;
-import com.knecon.fforesight.service.layoutparser.processor.model.text.TextDirection;
-import com.knecon.fforesight.service.layoutparser.processor.services.classification.ClassificationService;
-import com.knecon.fforesight.service.layoutparser.processor.services.mapper.MarkdownMapper;
+import com.knecon.fforesight.service.layoutparser.processor.markdown.MarkdownMapper;
 import com.knecon.fforesight.service.layoutparser.processor.model.AbstractPageBlock;
 import com.knecon.fforesight.service.layoutparser.processor.model.ClassificationDocument;
 import com.knecon.fforesight.service.layoutparser.processor.model.ClassificationPage;
+import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Document;
+import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.ImageType;
 import com.knecon.fforesight.service.layoutparser.processor.model.image.ClassifiedImage;
 import com.knecon.fforesight.service.layoutparser.processor.model.outline.OutlineExtractorService;
-import com.knecon.fforesight.service.layoutparser.processor.model.outline.SectionTreeBuilderService;
-import com.knecon.fforesight.service.layoutparser.processor.model.outline.SectionTreeEnhancementService;
-import com.knecon.fforesight.service.layoutparser.processor.model.outline.SectionTree;
+import com.knecon.fforesight.service.layoutparser.processor.model.outline.OutlineObject;
+import com.knecon.fforesight.service.layoutparser.processor.model.outline.OutlineValidationService;
+import com.knecon.fforesight.service.layoutparser.processor.model.outline.TOCEnrichmentService;
+import com.knecon.fforesight.service.layoutparser.processor.model.outline.TableOfContents;
 import com.knecon.fforesight.service.layoutparser.processor.model.table.Cell;
 import com.knecon.fforesight.service.layoutparser.processor.model.table.CleanRulings;
 import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPageBlock;
-import com.knecon.fforesight.service.layoutparser.processor.model.text.Word;
+import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPositionSequence;
 import com.knecon.fforesight.service.layoutparser.processor.python_api.adapter.CvTableParsingAdapter;
 import com.knecon.fforesight.service.layoutparser.processor.python_api.adapter.ImageServiceResponseAdapter;
 import com.knecon.fforesight.service.layoutparser.processor.python_api.adapter.VisualLayoutParsingAdapter;
@@ -56,6 +48,7 @@ import com.knecon.fforesight.service.layoutparser.processor.python_api.model.ima
 import com.knecon.fforesight.service.layoutparser.processor.python_api.model.table.TableCells;
 import com.knecon.fforesight.service.layoutparser.processor.python_api.model.table.TableServiceResponse;
 import com.knecon.fforesight.service.layoutparser.processor.python_api.model.table.VisualLayoutParsingResponse;
+import com.knecon.fforesight.service.layoutparser.processor.services.BodyTextFrameService;
 import com.knecon.fforesight.service.layoutparser.processor.services.RulingCleaningService;
 import com.knecon.fforesight.service.layoutparser.processor.services.SectionsBuilderService;
 import com.knecon.fforesight.service.layoutparser.processor.services.SimplifiedSectionTextService;
@@ -65,9 +58,13 @@ import com.knecon.fforesight.service.layoutparser.processor.services.blockificat
 import com.knecon.fforesight.service.layoutparser.processor.services.blockification.DocstrumBlockificationService;
 import com.knecon.fforesight.service.layoutparser.processor.services.blockification.DocuMineBlockificationService;
 import com.knecon.fforesight.service.layoutparser.processor.services.blockification.RedactManagerBlockificationService;
+import com.knecon.fforesight.service.layoutparser.processor.services.classification.ClarifyndClassificationService;
+import com.knecon.fforesight.service.layoutparser.processor.services.classification.DocuMineClassificationService;
+import com.knecon.fforesight.service.layoutparser.processor.services.classification.RedactManagerClassificationService;
 import com.knecon.fforesight.service.layoutparser.processor.services.factory.DocumentGraphFactory;
 import com.knecon.fforesight.service.layoutparser.processor.services.graphics.Box;
 import com.knecon.fforesight.service.layoutparser.processor.services.graphics.GraphicExtractorService;
+import com.knecon.fforesight.service.layoutparser.processor.services.mapper.DocumentDataMapper;
 import com.knecon.fforesight.service.layoutparser.processor.services.mapper.TaasDocumentDataMapper;
 import com.knecon.fforesight.service.layoutparser.processor.services.parsing.PDFLinesTextStripper;
 import com.knecon.fforesight.service.layoutparser.processor.services.visualization.LayoutGridService;
@@ -88,32 +85,32 @@ import lombok.extern.slf4j.Slf4j;
 @Slf4j
 @Service
 @RequiredArgsConstructor
-@FieldDefaults(level = AccessLevel.PRIVATE)
+@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
 public class LayoutParsingPipeline {
-    final ImageServiceResponseAdapter imageServiceResponseAdapter;
-    final CvTableParsingAdapter cvTableParsingAdapter;
-    final LayoutParsingStorageService layoutParsingStorageService;
-    final SectionsBuilderService sectionsBuilderService;
-    final SimplifiedSectionTextService simplifiedSectionTextService;
-    final RulingCleaningService rulingCleaningService;
-    final TableExtractionService tableExtractionService;
-    final DocuMineBlockificationService docuMineBlockificationService;
-    final RedactManagerBlockificationService redactManagerBlockificationService;
-    final BlockificationPostprocessingService blockificationPostprocessingService;
-    final DocstrumBlockificationService docstrumBlockificationService;
-    final LayoutGridService layoutGridService;
-    final ObservationRegistry observationRegistry;
-    final VisualLayoutParsingAdapter visualLayoutParsingAdapter;
-    final GraphicExtractorService graphicExtractorService;
-    final OutlineExtractorService outlineExtractorService;
-    final SectionTreeBuilderService sectionTreeBuilderService;
-    final SectionTreeEnhancementService sectionTreeEnhancementService;
-    final LayoutParserSettings settings;
-    final ClassificationService classificationService;
-
-    @Value("${LAYOUT_PARSER_VERSION:}")
-    private String layoutParserVersion;
+    ImageServiceResponseAdapter imageServiceResponseAdapter;
+    CvTableParsingAdapter cvTableParsingAdapter;
+    LayoutParsingStorageService layoutParsingStorageService;
+    SectionsBuilderService sectionsBuilderService;
+    RedactManagerClassificationService redactManagerClassificationService;
+    DocuMineClassificationService docuMineClassificationService;
+    SimplifiedSectionTextService simplifiedSectionTextService;
+    BodyTextFrameService bodyTextFrameService;
+    RulingCleaningService rulingCleaningService;
+    TableExtractionService tableExtractionService;
+    DocuMineBlockificationService docuMineBlockificationService;
+    RedactManagerBlockificationService redactManagerBlockificationService;
+    BlockificationPostprocessingService blockificationPostprocessingService;
+    DocstrumBlockificationService docstrumBlockificationService;
+    LayoutGridService layoutGridService;
+    ObservationRegistry observationRegistry;
+    VisualLayoutParsingAdapter visualLayoutParsingAdapter;
+    ClarifyndClassificationService clarifyndClassificationService;
+    GraphicExtractorService graphicExtractorService;
+    OutlineExtractorService outlineExtractorService;
+    OutlineValidationService outlineValidationService;
+    TOCEnrichmentService tocEnrichmentService;
+    LayoutparserSettings settings;
 
     public LayoutParsingFinishedEvent parseLayoutAndSaveFilesToStorage(LayoutParsingRequest layoutParsingRequest) throws IOException {
@@ -122,23 +119,17 @@ public class LayoutParsingPipeline {
         log.info("Starting layout parsing for {}", layoutParsingRequest.identifier());
         File originFile = layoutParsingStorageService.getOriginFile(layoutParsingRequest.originFileStorageId());
-        File viewerDocumentFile = layoutParsingStorageService.getViewerDocFile(layoutParsingRequest.viewerDocumentStorageId())
-                .orElse(originFile);
+        File viewerDocumentFile = layoutParsingStorageService.getViewerDocFile(layoutParsingRequest.viewerDocumentStorageId()).orElse(originFile);
         VisualLayoutParsingResponse visualLayoutParsingResponse = layoutParsingRequest.visualLayoutParsingFileId()
-                .map(layoutParsingStorageService::getVisualLayoutParsingFile)
-                .orElse(new VisualLayoutParsingResponse());
+                .map(layoutParsingStorageService::getVisualLayoutParsingFile).orElse(new VisualLayoutParsingResponse());
         ImageServiceResponse imageServiceResponse = layoutParsingRequest.imagesFileStorageId()
-                .map(layoutParsingStorageService::getImagesFile)
-                .orElse(new ImageServiceResponse());
+                .map(layoutParsingStorageService::getImagesFile).orElse(new ImageServiceResponse());
         TableServiceResponse tableServiceResponse = layoutParsingRequest.tablesFileStorageId()
-                .map(layoutParsingStorageService::getTablesFile)
-                .orElse(new TableServiceResponse());
+                .map(layoutParsingStorageService::getTablesFile).orElse(new TableServiceResponse());
 
-        LayoutParsingType layoutParsingType = settings.getLayoutParsingTypeOverride() == null //
-                ? layoutParsingRequest.layoutParsingType() : settings.getLayoutParsingTypeOverride();
-        ClassificationDocument classificationDocument = parseLayout(layoutParsingType,
+        ClassificationDocument classificationDocument = parseLayout(settings.getLayoutParsingTypeOverride() == null //
+                ? layoutParsingRequest.layoutParsingType() : settings.getLayoutParsingTypeOverride(),
                 originFile,
                 imageServiceResponse,
                 tableServiceResponse,
@@ -147,37 +138,36 @@ public class LayoutParsingPipeline {
         log.info("Building document graph for {}", layoutParsingRequest.identifier());
-        DocumentWithVisualization documentWithVisualization = observeBuildDocumentGraph(layoutParsingType, classificationDocument);
+        Document documentGraph = observeBuildDocumentGraph(settings.getLayoutParsingTypeOverride() == null //
+                ? layoutParsingRequest.layoutParsingType() : settings.getLayoutParsingTypeOverride(), classificationDocument);
 
         log.info("Creating viewer document for {}", layoutParsingRequest.identifier());
-        layoutGridService.addLayoutGrid(viewerDocumentFile, documentWithVisualization, viewerDocumentFile, layoutParsingType, layoutParserVersion, false);
+        layoutGridService.addLayoutGrid(viewerDocumentFile, documentGraph, viewerDocumentFile, false, layoutParsingRequest.visualLayoutParsingFileId().isPresent());
 
         log.info("Storing resulting files for {}", layoutParsingRequest.identifier());
-        layoutParsingStorageService.storeDocumentData(layoutParsingRequest, DocumentDataMapper.toDocumentData(documentWithVisualization.document()));
-        if (layoutParsingRequest.documentMarkdownFileStorageId()
-                .isPresent()) {
-            layoutParsingStorageService.storeMarkdownFile(layoutParsingRequest.documentMarkdownFileStorageId()
-                    .get(), new MarkdownMapper().toMarkdownContent(documentWithVisualization.document()));
+        layoutParsingStorageService.storeDocumentData(layoutParsingRequest, DocumentDataMapper.toDocumentData(documentGraph));
+        if (layoutParsingRequest.documentMarkdownFileStorageId().isPresent()) {
+            layoutParsingStorageService.storeMarkdownFile(layoutParsingRequest.documentMarkdownFileStorageId().get(), new MarkdownMapper().toMarkdownContent(documentGraph));
         }
-        layoutParsingStorageService.storeSimplifiedText(layoutParsingRequest, simplifiedSectionTextService.toSimplifiedText(documentWithVisualization.document()));
+        layoutParsingStorageService.storeSimplifiedText(layoutParsingRequest, simplifiedSectionTextService.toSimplifiedText(documentGraph));
         layoutParsingStorageService.storeViewerDocument(layoutParsingRequest, viewerDocumentFile);
 
         if (layoutParsingRequest.researchDocumentStorageId() != null) {
             log.info("Building research document data for {}", layoutParsingRequest.identifier());
-            var researchDocumentData = TaasDocumentDataMapper.fromDocument(documentWithVisualization.document());
+            var researchDocumentData = TaasDocumentDataMapper.fromDocument(documentGraph);
             layoutParsingStorageService.storeResearchDocumentData(layoutParsingRequest, researchDocumentData);
         }
 
         if (!viewerDocumentFile.equals(originFile)) {
-            assert !viewerDocumentFile.exists() || viewerDocumentFile.delete();
+            viewerDocumentFile.delete();
         }
-        assert !originFile.exists() || originFile.delete();
+        originFile.delete();
 
         return LayoutParsingFinishedEvent.builder()
                 .identifier(layoutParsingRequest.identifier())
-                .numberOfPages(documentWithVisualization.document().getNumberOfPages())
+                .numberOfPages(documentGraph.getNumberOfPages())
                 .duration(System.currentTimeMillis() - start)
                 .message(format("""
                         Layout parsing has finished in %.02f s.
@@ -192,22 +182,21 @@ public class LayoutParsingPipeline {
                         Viewer Doc: %s""",
                         ((float) (System.currentTimeMillis() - start)) / 1000,
                         layoutParsingRequest.identifier(),
-                        buildSemanticNodeCountMessage(documentWithVisualization.document().getNumberOfPages(), documentWithVisualization.buildSemanticNodeCounts()),
+                        buildSemanticNodeCountMessage(documentGraph.getNumberOfPages(), documentGraph.buildSemanticNodeCounts()),
                         layoutParsingRequest.structureFileStorageId(),
                         layoutParsingRequest.textBlockFileStorageId(),
                         layoutParsingRequest.positionBlockFileStorageId(),
                         layoutParsingRequest.pageFileStorageId(),
                         layoutParsingRequest.simplifiedTextStorageId(),
                         layoutParsingRequest.viewerDocumentStorageId()))
-                .layoutParserVersion(layoutParserVersion)
                 .build();
     }
 
-    private DocumentWithVisualization observeBuildDocumentGraph(LayoutParsingType layoutParsingType, ClassificationDocument classificationDocument) {
-        AtomicReference<DocumentWithVisualization> documentReference = new AtomicReference<>();
+    private Document observeBuildDocumentGraph(LayoutParsingType layoutParsingType, ClassificationDocument classificationDocument) {
+        AtomicReference<Document> documentReference = new AtomicReference<>();
 
         Observation.createNotStarted("LayoutParsingPipeline", observationRegistry)
                 .contextualName("build-document-graph")
@@ -254,8 +243,12 @@ public class LayoutParsingPipeline {
         }
 
         List<ClassificationPage> classificationPages = new ArrayList<>();
+        OutlineObject lastProcessedOutlineObject = null;
 
-        classificationDocument.setOutlineObjectTree(outlineExtractorService.getOutlineObjectTree(originDocument));
+        // parsing the structure elements could be useful as well
+        if (layoutParsingType != LayoutParsingType.REDACT_MANAGER_OLD && layoutParsingType != LayoutParsingType.DOCUMINE_OLD) {
+            classificationDocument.setOutlineObjectTree(outlineExtractorService.getOutlineObjectTree(originDocument));
+        }
 
         long pageCount = originDocument.getNumberOfPages();
@@ -280,22 +273,22 @@ public class LayoutParsingPipeline {
             stripper.setEndPage(pageNumber);
             stripper.setPdpage(pdPage);
             stripper.getText(originDocument);
-            List<Word> words = stripper.getWords();
-            // rotateDirAdjExactly(words, pdPage); // works really well for many highly rotated documents (e.g. VV-331340.pdf), but it decreases the headline performance by 1.3%, so I am leaving it out for now
+            List<TextPositionSequence> words = stripper.getTextPositionSequences();
 
             if (layoutParsingType.equals(LayoutParsingType.DOCUMINE_OLD)) {
                 var lines = TextPositionOperations.groupByLine(new HashSet<>(words));
                 classificationDocument.getLayoutDebugLayer().addLineVisualizationsFromNestedTextPosition(lines, pageNumber);
-                words = TextPositionOperations.sortWords(lines);
+                words = TextPositionOperations.sortLines(lines);
             }
             classificationDocument.getLayoutDebugLayer().addTextVisualizations(words, pageNumber);
 
             PDRectangle pdr = pdPage.getMediaBox();
-            List<Ruling> rulings = stripper.getRulings();
-            classificationDocument.getLayoutDebugLayer().addRulingVisualization(rulings, pageNumber);
-            CleanRulings cleanRulings = rulingCleaningService.deduplicateAndStraightenRulings(pdfTableCells.get(pageNumber), rulings);
+            int rotation = pdPage.getRotation();
+            boolean isLandscape = pdr.getWidth() > pdr.getHeight() && (rotation == 0 || rotation == 180) || pdr.getHeight() > pdr.getWidth() && (rotation == 90 || rotation == 270);
+            PDRectangle cropbox = pdPage.getCropBox();
+            classificationDocument.getLayoutDebugLayer().addRulingVisualization(stripper.getRulings(), pageNumber);
+            CleanRulings cleanRulings = rulingCleaningService.deduplicateAndStraightenRulings(pdfTableCells.get(pageNumber), stripper.getRulings());
 
             PageInformation pageInformation = PageInformation.fromPDPage(pageNumber, pdPage);
             List<Cell> emptyTableCells = TableExtractionService.findCells(cleanRulings.getHorizontals(), cleanRulings.getVerticals(), pageInformation);
@@ -303,7 +296,7 @@ public class LayoutParsingPipeline {
             TextRulingsClassifier.classifyUnderlinedAndStrikethroughText(words, cleanRulings);
 
-            List<Box> graphics = graphicExtractorService.extractPathElementGraphics(originDocument, pdPage, pageNumber, cleanRulings, stripper.getWords(), false);
+            List<Box> graphics = graphicExtractorService.extractPathElementGraphics(originDocument, pdPage, pageNumber, cleanRulings, stripper.getTextPositionSequences(), false);
 
             pdfImages.computeIfAbsent(pageNumber, x -> new ArrayList<>())
                     .addAll(graphics.stream()
@@ -315,7 +308,8 @@ public class LayoutParsingPipeline {
                     .toList());
 
             ClassificationPage classificationPage = switch (layoutParsingType) {
-                case REDACT_MANAGER_OLD -> redactManagerBlockificationService.blockify(stripper.getWords(), cleanRulings, classificationDocument.getLayoutDebugLayer());
+                case REDACT_MANAGER_OLD ->
+                        redactManagerBlockificationService.blockify(stripper.getTextPositionSequences(), cleanRulings, classificationDocument.getLayoutDebugLayer());
                 case DOCUMINE_OLD -> docuMineBlockificationService.blockify(words, cleanRulings);
                 case DOCUMINE, REDACT_MANAGER, REDACT_MANAGER_PARAGRAPH_DEBUG, REDACT_MANAGER_WITHOUT_DUPLICATE_PARAGRAPH ->
                         docstrumBlockificationService.blockify(words, cleanRulings, true, classificationDocument.getLayoutDebugLayer(), layoutParsingType);
@@ -323,9 +317,26 @@ public class LayoutParsingPipeline {
                     docstrumBlockificationService.blockify(words, cleanRulings, false, classificationDocument.getLayoutDebugLayer(), layoutParsingType);
             };
 
-            updateClassificationPage(pdPage, pdr, classificationPage, cleanRulings, pageNumber, pageInformation);
+            classificationPage.setCleanRulings(cleanRulings);
+            classificationPage.setRotation(rotation);
+            classificationPage.setLandscape(isLandscape);
+            classificationPage.setPageNumber(pageNumber);
+            classificationPage.setPageWidth(cropbox.getWidth());
+            classificationPage.setPageHeight(cropbox.getHeight());
 
-            blockificationPostprocessingService.findHeadlinesFromOutline(classificationDocument, pageNumber, classificationPage, pageInformation);
+            if (layoutParsingType != LayoutParsingType.REDACT_MANAGER_OLD && layoutParsingType != LayoutParsingType.DOCUMINE_OLD) {
+                List<OutlineObject> outlineObjects = classificationDocument.getOutlineObjectTree().getOutlineObjectsPerPage().getOrDefault(pageNumber - 1, new ArrayList<>());
+                OutlineObject notFoundOutlineObject = null;
+                if (lastProcessedOutlineObject != null && !lastProcessedOutlineObject.isFound()) {
+                    lastProcessedOutlineObject.setPoint(new Point2D.Float(0, cropbox.getHeight()));
+                    notFoundOutlineObject = lastProcessedOutlineObject;
+                }
+                if (!outlineObjects.isEmpty()) {
+                    classificationPage.setOutlineObjects(outlineObjects);
+                    lastProcessedOutlineObject = blockificationPostprocessingService.sanitizeOutlineBlocks(classificationPage, notFoundOutlineObject);
+                }
+            }
 
             classificationDocument.getLayoutDebugLayer().addMarkedContentVisualizations(stripper.getMarkedContents(), pageNumber);
             // MarkedContent needs to be converted at this point, otherwise it leads to GC Problems in Pdfbox.
@@ -355,67 +366,40 @@ public class LayoutParsingPipeline {
         originDocument.close();
 
-        classificationService.classify(classificationDocument, layoutParsingType, identifier);
+        log.info("Calculating BodyTextFrame for {}", identifier);
+        bodyTextFrameService.setBodyTextFrames(classificationDocument, layoutParsingType);
+        for (ClassificationPage page : classificationDocument.getPages()) {
+            classificationDocument.getLayoutDebugLayer().addCleanRulingVisualization(page.getCleanRulings(), page.getPageNumber());
+        }
+
+        log.info("Classify TextBlocks for {}", identifier);
+        switch (layoutParsingType) {
+            case REDACT_MANAGER, REDACT_MANAGER_PARAGRAPH_DEBUG, REDACT_MANAGER_OLD, CLARIFYND_PARAGRAPH_DEBUG, REDACT_MANAGER_WITHOUT_DUPLICATE_PARAGRAPH ->
+                    redactManagerClassificationService.classifyDocument(classificationDocument);
+            case DOCUMINE_OLD, DOCUMINE -> docuMineClassificationService.classifyDocument(classificationDocument);
+            case CLARIFYND -> clarifyndClassificationService.classifyDocument(classificationDocument);
+        }
 
-        SectionTree sectionTree = sectionTreeBuilderService.createSectionTree(classificationDocument);
-        classificationDocument.setSectionTree(sectionTree);
+        List<TextPageBlock> headlines = classificationDocument.getPages()
+                .stream()
+                .flatMap(classificationPage -> classificationPage.getTextBlocks()
+                        .stream()
+                        .filter(tb -> tb instanceof TextPageBlock && tb.getClassification() != null && tb.getClassification().isHeadline())
+                        .map(tb -> (TextPageBlock) tb))
+                .toList();
+        TableOfContents tableOfContents = outlineValidationService.createToC(headlines);
+        classificationDocument.setTableOfContents(tableOfContents);
 
         log.info("Building Sections for {}", identifier);
         switch (layoutParsingType) {
             case CLARIFYND_PARAGRAPH_DEBUG, REDACT_MANAGER_PARAGRAPH_DEBUG -> sectionsBuilderService.buildParagraphDebugSections(classificationDocument);
-            default -> sectionTreeEnhancementService.assignSectionBlocksAndImages(classificationDocument);
+            default -> tocEnrichmentService.assignSectionBlocksAndImages(classificationDocument);
         }
 
         return classificationDocument;
     }
 
-    private static void updateClassificationPage(PDPage pdPage,
-                                                 PDRectangle pdr,
-                                                 ClassificationPage classificationPage,
-                                                 CleanRulings cleanRulings,
-                                                 int pageNumber,
-                                                 PageInformation pageInformation) {
-        int rotation = pdPage.getRotation();
-        boolean isLandscape = pdr.getWidth() > pdr.getHeight() && (rotation == 0 || rotation == 180) || pdr.getHeight() > pdr.getWidth() && (rotation == 90 || rotation == 270);
-        classificationPage.setCleanRulings(cleanRulings);
-        classificationPage.setRotation(rotation);
-        classificationPage.setLandscape(isLandscape);
-        classificationPage.setPageNumber(pageNumber);
-        classificationPage.setPageWidth((float) pageInformation.width());
-        classificationPage.setPageHeight((float) pageInformation.height());
-    }
-
-    private static void rotateDirAdjExactly(List<Word> words, PDPage pdPage) {
-        for (TextDirection dir : TextDirection.values()) {
-            double averageRotation = words.stream()
-                    .map(Word::getCharacters)
-                    .flatMap(Collection::stream)
-                    .map(Character::getTextPosition)
-                    .filter(pos -> pos.getDir().equals(dir))
-                    .mapToDouble(RedTextPosition::getExactDir).average()
-                    .orElse(0);
-            if (averageRotation == 0) {
-                continue;
-            }
-            AffineTransform rotateInstance = AffineTransform.getRotateInstance(averageRotation, pdPage.getMediaBox().getWidth() / 2, pdPage.getMediaBox().getHeight() / 2);
-            for (Word word : words) {
-                if (!dir.equals(word.getDir())) {
-                    continue;
-                }
-                word.transform(rotateInstance);
-            }
-        }
-    }
-
     private void addNumberOfPagesToTrace(int numberOfPages, long size) {
         if (observationRegistry.getCurrentObservation() != null) {
@@ -457,10 +441,10 @@ public class LayoutParsingPipeline {
         // Collect all statistics for the classificationPage, except from blocks inside tables, as tables will always be added to BodyTextFrame.
         for (AbstractPageBlock textBlock : classificationPage.getTextBlocks()) {
             if (textBlock instanceof TextPageBlock) {
-                if (((TextPageBlock) textBlock).getWords() == null) {
+                if (((TextPageBlock) textBlock).getSequences() == null) {
                     continue;
                 }
-                for (Word word : ((TextPageBlock) textBlock).getWords()) {
+                for (TextPositionSequence word : ((TextPageBlock) textBlock).getSequences()) {
                     classificationPage.getTextHeightCounter().add(word.getTextHeight());
                     classificationPage.getFontCounter().add(word.getFont());
                     classificationPage.getFontSizeCounter().add(word.getFontSize());

View File

@@ -11,14 +11,12 @@ import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.nio.file.StandardOpenOption;
 import java.util.Optional;
-import java.util.concurrent.CompletableFuture;
-import org.springframework.core.task.TaskExecutor;
 import org.springframework.stereotype.Service;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.iqser.red.service.redaction.v1.server.data.DocumentData;
 import com.iqser.red.storage.commons.service.StorageService;
+import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.DocumentData;
 import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.SimplifiedText;
 import com.knecon.fforesight.service.layoutparser.internal.api.data.taas.ResearchDocumentData;
 import com.knecon.fforesight.service.layoutparser.internal.api.queue.LayoutParsingRequest;
@@ -41,8 +39,6 @@ public class LayoutParsingStorageService {
     private final StorageService storageService;
     private final ObjectMapper objectMapper;
-    private final TaskExecutor taskExecutor;
 
     @Observed(name = "LayoutParsingStorageService", contextualName = "get-origin-file")
     public File getOriginFile(String storageId) throws IOException {
@@ -104,35 +100,13 @@ public class LayoutParsingStorageService {
     }
 
+    @SneakyThrows
     @Observed(name = "LayoutParsingStorageService", contextualName = "store-document-data")
     public void storeDocumentData(LayoutParsingRequest layoutParsingRequest, DocumentData documentData) {
-        Runnable storeDocumentStructureRunnable = () -> storageService.storeProtoObject(TenantContext.getTenantId(),
-                layoutParsingRequest.structureFileStorageId(),
-                documentData.getDocumentStructure());
-        CompletableFuture<Void> storeDocumentStructureFuture = CompletableFuture.runAsync(storeDocumentStructureRunnable, taskExecutor);
-
-        Runnable storeDocumentTextDataRunnable = () -> storageService.storeProtoObject(TenantContext.getTenantId(),
-                layoutParsingRequest.textBlockFileStorageId(),
-                documentData.getDocumentTextData());
-        CompletableFuture<Void> storeDocumentTextDataFuture = CompletableFuture.runAsync(storeDocumentTextDataRunnable, taskExecutor);
-
-        Runnable storeDocumentPositionsRunnable = () -> storageService.storeProtoObject(TenantContext.getTenantId(),
-                layoutParsingRequest.positionBlockFileStorageId(),
-                documentData.getDocumentPositionData());
-        CompletableFuture<Void> storeDocumentPositionsFuture = CompletableFuture.runAsync(storeDocumentPositionsRunnable, taskExecutor);
-
-        Runnable storeDocumentPagesRunnable = () -> storageService.storeProtoObject(TenantContext.getTenantId(),
-                layoutParsingRequest.pageFileStorageId(),
-                documentData.getDocumentPages());
-        CompletableFuture<Void> storeDocumentPagesFuture = CompletableFuture.runAsync(storeDocumentPagesRunnable, taskExecutor);
-
-        CompletableFuture.allOf(storeDocumentStructureFuture, storeDocumentTextDataFuture, storeDocumentPositionsFuture, storeDocumentPagesFuture).join();
+        storageService.storeJSONObject(TenantContext.getTenantId(), layoutParsingRequest.structureFileStorageId(), documentData.getDocumentStructure());
+        storageService.storeJSONObject(TenantContext.getTenantId(), layoutParsingRequest.textBlockFileStorageId(), documentData.getDocumentTextData());
+        storageService.storeJSONObject(TenantContext.getTenantId(), layoutParsingRequest.positionBlockFileStorageId(), documentData.getDocumentPositions());
+        storageService.storeJSONObject(TenantContext.getTenantId(), layoutParsingRequest.pageFileStorageId(), documentData.getDocumentPages());
     }

View File

@@ -13,8 +13,9 @@ import lombok.experimental.FieldDefaults;
 @Configuration
 @ConfigurationProperties("layoutparser")
 @FieldDefaults(level = AccessLevel.PRIVATE)
-public class LayoutParserSettings {
+public class LayoutparserSettings {
     boolean debug;
     LayoutParsingType layoutParsingTypeOverride;
+    String pdftronLicense;
 }

View File

@@ -18,7 +18,7 @@ import com.knecon.fforesight.service.layoutparser.processor.docstrum.service.Zon
 import com.knecon.fforesight.service.layoutparser.processor.model.table.CleanRulings;
 import com.knecon.fforesight.service.layoutparser.processor.model.text.RedTextPosition;
 import com.knecon.fforesight.service.layoutparser.processor.model.text.TextDirection;
-import com.knecon.fforesight.service.layoutparser.processor.model.text.Word;
+import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPositionSequence;
 import com.knecon.fforesight.service.layoutparser.processor.visualization.LayoutDebugLayer;
 import lombok.RequiredArgsConstructor;
@@ -35,7 +35,7 @@ public class DocstrumSegmentationService {
     private final ReadingOrderService readingOrderService;
 
-    public List<Zone> segmentPage(List<Word> textPositions, boolean xyOrder, CleanRulings usedRulings, LayoutDebugLayer visualizations) {
+    public List<Zone> segmentPage(List<TextPositionSequence> textPositions, boolean xyOrder, CleanRulings usedRulings, LayoutDebugLayer visualizations) {
         EnumMap<TextDirection, Integer> directionCounts = new EnumMap<>(TextDirection.class);
@@ -78,14 +78,18 @@ public class DocstrumSegmentationService {
     }
 
-    private List<Zone> computeZones(List<Word> textPositions, CleanRulings rulings, LayoutDebugLayer visualizations, TextDirection direction) {
-        List<Character> characters = textPositions.stream()
+    private List<Zone> computeZones(List<TextPositionSequence> textPositions, CleanRulings rulings, LayoutDebugLayer visualizations, TextDirection direction) {
+        List<RedTextPosition> positions = textPositions.stream()
                 .filter(t -> t.getDir() == direction)
-                .map(Word::getCharacters)
+                .map(TextPositionSequence::getTextPositions)
                 .flatMap(List::stream)
                 .toList();
+        List<Character> characters = positions.stream()
+                .map(Character::new)
+                .collect(Collectors.toList());
 
         nearestNeighbourService.findNearestNeighbors(characters);
         double characterSpacing = spacingService.computeCharacterSpacing(characters);

View File

@@ -133,7 +133,7 @@ public abstract class BoundingBox {
     }

-    public boolean intersectsX(BoundingBox other, float threshold) {
+    private boolean intersectsX(BoundingBox other, float threshold) {
         return this.getX() - threshold <= other.getMaxX() && this.getMaxX() + threshold >= other.getX();
     }
@@ -225,31 +225,33 @@ public abstract class BoundingBox {
     public double horizontalDistance(BoundingBox other) {
-        double rect1Right = getMaxX();
-        double rect1Left = getMinX();
-        double rect2Right = other.getMaxX();
-        double rect2Left = other.getMinX();
-
-        if (rect1Left > rect2Right || rect2Left > rect1Right) {
-            return Math.max(rect2Left - rect1Right, rect1Left - rect2Right);
-        } else {
-            return 0;
-        }
+        Rectangle2D left;
+        Rectangle2D right;
+        if (this.leftOf(other)) {
+            left = this.getBBox();
+            right = other.getBBox();
+        } else {
+            left = other.getBBox();
+            right = this.getBBox();
+        }
+        return Math.max(0, right.getMinX() - left.getMaxX());
     }

     public double verticalDistance(BoundingBox other) {
-        double rect1Top = getMaxY();
-        double rect1Bottom = getMinY();
-        double rect2Top = other.getMaxY();
-        double rect2Bottom = other.getMinY();
-
-        if (rect1Bottom > rect2Top || rect2Bottom > rect1Top) {
-            return Math.max(rect2Bottom - rect1Top, rect1Bottom - rect2Top);
-        } else {
-            return 0;
-        }
+        Rectangle2D bottom;
+        Rectangle2D top;
+        if (this.isAbove(other)) {
+            top = this.getBBox();
+            bottom = other.getBBox();
+        } else {
+            bottom = this.getBBox();
+            top = other.getBBox();
+        }
+        return Math.max(0, bottom.getMinY() - top.getMaxY());
     }
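Both distance methods above reduce to the same formula: order the two boxes along the axis in question and return max(0, nearest edge of the farther box minus farthest edge of the nearer box). A standalone sketch of that computation on plain java.awt.geom.Rectangle2D values (the BoundingBox helpers leftOf, isAbove and getBBox are assumed as used above, so simple coordinate comparisons stand in for them):

import java.awt.geom.Rectangle2D;

public class BoxGapSketch {

    // Horizontal gap between two rectangles; 0 when they overlap or touch on the x-axis.
    static double horizontalGap(Rectangle2D a, Rectangle2D b) {
        Rectangle2D left = a.getMinX() <= b.getMinX() ? a : b;
        Rectangle2D right = left == a ? b : a;
        return Math.max(0, right.getMinX() - left.getMaxX());
    }

    // Vertical gap, same idea on the y-axis.
    static double verticalGap(Rectangle2D a, Rectangle2D b) {
        Rectangle2D first = a.getMinY() <= b.getMinY() ? a : b;   // box that starts lower on the y-axis
        Rectangle2D second = first == a ? b : a;
        return Math.max(0, second.getMinY() - first.getMaxY());
    }

    public static void main(String[] args) {
        Rectangle2D a = new Rectangle2D.Double(0, 0, 10, 10);
        Rectangle2D b = new Rectangle2D.Double(15, 25, 10, 10);
        System.out.println(horizontalGap(a, b)); // 5.0
        System.out.println(verticalGap(a, b));   // 15.0
    }
}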

View File

@@ -1,9 +1,9 @@
 package com.knecon.fforesight.service.layoutparser.processor.docstrum.model;

-import static com.knecon.fforesight.service.layoutparser.processor.model.text.Word.BOLD;
-import static com.knecon.fforesight.service.layoutparser.processor.model.text.Word.BOLD_ITALIC;
-import static com.knecon.fforesight.service.layoutparser.processor.model.text.Word.ITALIC;
-import static com.knecon.fforesight.service.layoutparser.processor.model.text.Word.STANDARD;
+import static com.knecon.fforesight.service.layoutparser.processor.model.text.TextPositionSequence.BOLD;
+import static com.knecon.fforesight.service.layoutparser.processor.model.text.TextPositionSequence.BOLD_ITALIC;
+import static com.knecon.fforesight.service.layoutparser.processor.model.text.TextPositionSequence.ITALIC;
+import static com.knecon.fforesight.service.layoutparser.processor.model.text.TextPositionSequence.STANDARD;

 import java.util.ArrayList;
 import java.util.Arrays;
@@ -14,7 +14,7 @@ import java.util.Map;
 import java.util.concurrent.atomic.AtomicInteger;

 import com.knecon.fforesight.service.layoutparser.processor.docstrum.utils.FastAtan2;
-import com.knecon.fforesight.service.layoutparser.processor.model.text.Word;
+import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPositionSequence;
 import com.knecon.fforesight.service.layoutparser.processor.model.text.FontStyle;

 import lombok.Data;
@@ -24,7 +24,7 @@ import lombok.EqualsAndHashCode;
 @EqualsAndHashCode(onlyExplicitlyIncluded = true, callSuper = false)
 public class Line extends TextBoundingBox {

-    private static final double WORD_DISTANCE_MULTIPLIER = 0.17;
+    private static final double WORD_DISTANCE_MULTIPLIER = 0.18;

     @EqualsAndHashCode.Include
     private final double x0;
@@ -36,13 +36,18 @@ public class Line extends TextBoundingBox {
     @EqualsAndHashCode.Include
     private final double y1;

+    private final double height;
+
     private FontStyle fontStyle;

-    private final List<Word> words;
+    private final List<Character> characters;
+    private final List<TextPositionSequence> words = new ArrayList<>();

     public Line(List<Character> characters, double wordSpacing) {
+        this.characters = characters;
         if (characters.size() >= 2) {
             // linear regression
             double sx = 0.0;
@@ -71,32 +76,20 @@ public class Line extends TextBoundingBox {
             this.y0 = character.getY() - dy;
             this.y1 = character.getY() + dy;
         }
-        this.words = new ArrayList<>();
-        computeWords(characters, wordSpacing * WORD_DISTANCE_MULTIPLIER);
+        height = computeHeight();
+        computeWords(wordSpacing * WORD_DISTANCE_MULTIPLIER);
         buildBBox();
         computeFontStyle();
     }

-    public Line(List<Word> words) {
-        this.words = words;
-        buildBBox();
-        x0 = getMinX();
-        y0 = getMinY();
-        x1 = getMaxX();
-        y1 = getMaxY();
-        computeFontStyle();
-    }
-
     private void computeFontStyle() {
         EnumMap<FontStyle, AtomicInteger> fontStyleCounter = new EnumMap<>(FontStyle.class);
         for (FontStyle fontStyle : FontStyle.values()) {
             fontStyleCounter.put(fontStyle, new AtomicInteger(0));
         }
-        for (Word word : words) {
+        for (TextPositionSequence word : words) {
             switch (word.getFontStyle()) {
                 case STANDARD -> fontStyleCounter.get(FontStyle.REGULAR).getAndIncrement();
                 case BOLD -> fontStyleCounter.get(FontStyle.BOLD).getAndIncrement();
@@ -107,7 +100,8 @@ public class Line extends TextBoundingBox {
         fontStyle = fontStyleCounter.entrySet()
                 .stream()
                 .max(Comparator.comparing(entry -> entry.getValue().get()))
-                .map(Map.Entry::getKey).orElse(FontStyle.REGULAR);
+                .map(Map.Entry::getKey)
+                .orElse(FontStyle.REGULAR);
     }
@@ -123,6 +117,14 @@ public class Line extends TextBoundingBox {
     }

+    private double computeHeight() {
+        return characters.stream()
+                .map(Character::getHeight)
+                .reduce(0d, Double::sum) / characters.size();
+    }
+
     public double angularDifference(Line j) {
         double diff = Math.abs(getAngle() - j.getAngle());
@@ -155,22 +157,19 @@ public class Line extends TextBoundingBox {
     }

-    private void computeWords(List<Character> characters, double wordSpacing) {
-        // Imo, the width of space should be scaled with the font size, but it only depends on the median distance between horizontal neighbours.
-        // If there are large differences in fontsize on a page, this might lead to missing spaces for the smaller fonts and too many for larger fonts.
-        // I've just now changed the scaling factor. If you come across this comment with missing whitespaces again, try scaling the fontsize instead of simply changing the factor again.
-        Word word = new Word();
+    private void computeWords(double wordSpacing) {
+        TextPositionSequence word = new TextPositionSequence();
         Character previous = null;
         for (Character current : characters) {
             if (previous != null) {
                 double dist = current.getTextPosition().getXDirAdj() - previous.getTextPosition().getXDirAdj() - previous.getTextPosition().getWidthDirAdj();
                 if (dist > wordSpacing) {
                     words.add(word);
-                    word = new Word();
+                    word = new TextPositionSequence();
                 }
             }
-            word.add(current);
+            word.getTextPositions().add(current.getTextPosition());
             previous = current;
         }
         words.add(word);
@@ -179,7 +178,9 @@ public class Line extends TextBoundingBox {

     private void buildBBox() {
-        this.setToBBoxOfComponents(words);
+        this.setToBBoxOfComponents(characters.stream()
+                .map(Character::getTextPosition)
+                .toList());
     }
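computeWords above walks a line's characters in order and starts a new word whenever the horizontal gap to the previous glyph exceeds the word-spacing threshold (wordSpacing scaled by WORD_DISTANCE_MULTIPLIER). A small standalone sketch of that gap-splitting loop, using a made-up Glyph record instead of the project's Character and TextPositionSequence types:

import java.util.ArrayList;
import java.util.List;

public class WordSplitSketch {

    // Stand-in for a positioned glyph: x start and advance width.
    record Glyph(char c, double x, double width) {}

    static List<String> splitIntoWords(List<Glyph> glyphs, double wordSpacing) {
        List<String> words = new ArrayList<>();
        StringBuilder word = new StringBuilder();
        Glyph previous = null;
        for (Glyph current : glyphs) {
            if (previous != null) {
                double gap = current.x() - previous.x() - previous.width();
                if (gap > wordSpacing) {       // gap wider than the threshold -> word boundary
                    words.add(word.toString());
                    word = new StringBuilder();
                }
            }
            word.append(current.c());
            previous = current;
        }
        words.add(word.toString());
        return words;
    }

    public static void main(String[] args) {
        List<Glyph> glyphs = List.of(
                new Glyph('H', 0, 5), new Glyph('i', 5, 2),
                new Glyph('y', 12, 4), new Glyph('o', 16, 4), new Glyph('u', 20, 4));
        System.out.println(splitIntoWords(glyphs, 3)); // [Hi, you]
    }
}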

View File

@@ -99,82 +99,4 @@ public abstract class TextBoundingBox extends BoundingBox {
         return this.bBoxDirAdj.getCenterX();
     }

-    public double horizontalDistanceDirAdj(TextBoundingBox other) {
-        double rect1Right = getMaxXDirAdj();
-        double rect1Left = getXDirAdj();
-        double rect2Right = other.getMaxXDirAdj();
-        double rect2Left = other.getXDirAdj();
-
-        if (rect1Left > rect2Right || rect2Left > rect1Right) {
-            return Math.max(rect2Left - rect1Right, rect1Left - rect2Right);
-        } else {
-            return 0;
-        }
-    }
-
-    public double verticalDistanceDirAdj(TextBoundingBox other) {
-        double rect1Top = getMaxYDirAdj();
-        double rect1Bottom = getYDirAdj();
-        double rect2Top = other.getMaxYDirAdj();
-        double rect2Bottom = other.getYDirAdj();
-
-        if (rect1Bottom > rect2Top || rect2Bottom > rect1Top) {
-            return Math.max(rect2Bottom - rect1Top, rect1Bottom - rect2Top);
-        } else {
-            return 0;
-        }
-    }
-
-    public boolean intersectsDirAdj(TextBoundingBox other) {
-        return this.intersectsXDirAdj(other) && this.intersectsYDirAdj(other);
-    }
-
-    public boolean intersectsDirAdj(TextBoundingBox other, float yThreshold, float xThreshold) {
-        return this.intersectsXDirAdj(other, xThreshold) && this.intersectsYDirAdj(other, yThreshold);
-    }
-
-    public boolean intersectsXDirAdj(TextBoundingBox other, float threshold) {
-        return this.getXDirAdj() - threshold <= other.getMaxXDirAdj() && this.getMaxXDirAdj() + threshold >= other.getXDirAdj();
-    }
-
-    public boolean intersectsXDirAdj(TextBoundingBox other) {
-        return this.getXDirAdj() <= other.getMaxXDirAdj() && this.getMaxXDirAdj() >= other.getXDirAdj();
-    }
-
-    public boolean intersectsYDirAdj(TextBoundingBox other) {
-        return this.getYDirAdj() <= other.getMaxYDirAdj() && this.getMaxYDirAdj() >= other.getYDirAdj();
-    }
-
-    public boolean intersectsYDirAdj(TextBoundingBox other, float threshold) {
-        return this.getYDirAdj() - threshold <= other.getMaxYDirAdj() && this.getMaxYDirAdj() + threshold >= other.getYDirAdj();
-    }
-
-    public boolean isAboveDirAdj(TextBoundingBox other) {
-        return other.isBelow(this);
-    }
-
-    public boolean isBelowDirAdj(TextBoundingBox other) {
-        return this.intersectsXDirAdj(other) && this.getYDirAdj() >= other.getMaxYDirAdj();
-    }
 }

View File

@@ -28,10 +28,4 @@ public class UnionFind<T> extends org.jgrapht.alg.util.UnionFind<T> {
         return setRep.values();
     }

-    public Collection<T> getElements() {
-        return getParentMap().keySet();
-    }
-
 }

View File

@@ -18,6 +18,7 @@ public class Zone extends TextBoundingBox {
     @SuppressWarnings("PMD.ConstructorCallsOverridableMethod")
     public Zone(List<Line> lines) {
+        lines.sort(Comparator.comparingDouble(Line::getY0));
         this.lines = lines;
         setToBBoxOfComponents(lines);
     }

View File

@@ -17,7 +17,7 @@ public class LineBuilderService {
     private static final double CHARACTER_SPACING_DISTANCE_MULTIPLIER = 3.5;
     private static final double LINE_SPACING_THRESHOLD_MULTIPLIER = 0.67;
-    private static final double ANGLE_TOLERANCE = Math.toRadians(5);
+    private static final double ANGLE_TOLERANCE = Math.PI / 6;

     public List<Line> buildLines(List<Character> characters, double characterSpacing, double lineSpacing, CleanRulings rulings) {

View File

@@ -1,7 +1,9 @@
 package com.knecon.fforesight.service.layoutparser.processor.docstrum.service;

+import static com.knecon.fforesight.service.layoutparser.processor.model.SectionIdentifier.numericalIdentifierPattern;
+
 import java.util.ArrayList;
-import java.util.Collection;
+import java.util.Comparator;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
@@ -9,12 +11,11 @@ import java.util.stream.Collectors;
 import org.springframework.stereotype.Service;

+import com.knecon.fforesight.service.layoutparser.processor.docstrum.model.Character;
 import com.knecon.fforesight.service.layoutparser.processor.docstrum.model.Line;
 import com.knecon.fforesight.service.layoutparser.processor.docstrum.model.UnionFind;
 import com.knecon.fforesight.service.layoutparser.processor.docstrum.model.Zone;
 import com.knecon.fforesight.service.layoutparser.processor.model.table.CleanRulings;
-import com.knecon.fforesight.service.layoutparser.processor.model.text.Word;
-import com.knecon.fforesight.service.layoutparser.processor.utils.TextPositionOperations;

 @Service
 public class ZoneBuilderService {
@@ -30,7 +31,7 @@ public class ZoneBuilderService {
     private static final double MAX_LINE_SIZE_SCALE = 2.5;
-    private static final double ANGLE_TOLERANCE = Math.toRadians(5);
+    private static final double ANGLE_TOLERANCE = Math.PI / 6;
     private static final double MAX_VERTICAL_MERGE_DISTANCE = 0.5;
@@ -113,14 +114,64 @@ public class ZoneBuilderService {
     private Zone mergeLinesInZone(List<Line> lines, double characterSpacing, double lineSpacing) {
-        Set<Word> words = lines.stream()
-                .map(Line::getWords)
-                .flatMap(Collection::stream)
-                .collect(Collectors.toSet());
-        Collection<Set<Word>> groupedLines = TextPositionOperations.groupByLine(words);
-        List<Line> sortedLines = TextPositionOperations.sortLines(groupedLines);
-        return new Zone(sortedLines);
+        double maxHorizontalDistance = 0;
+        double minVerticalDistance = 0;
+        double maxVerticalDistance = lineSpacing * MAX_VERTICAL_MERGE_DISTANCE;
+
+        UnionFind<Line> unionFind = new UnionFind<>(new HashSet<>(lines));
+
+        lines.forEach(outer -> {
+            lines.forEach(inner -> {
+                if (inner == outer) {
+                    return;
+                }
+                double horizontalDistance = outer.horizontalDistance(inner);
+                double verticalDistance = outer.verticalDistance(inner);
+                if (horizontalDistance <= maxHorizontalDistance && minVerticalDistance <= verticalDistance && verticalDistance <= maxVerticalDistance) {
+                    unionFind.union(outer, inner);
+                } else if (minVerticalDistance <= verticalDistance
+                        && verticalDistance <= maxVerticalDistance
+                        && Math.abs(horizontalDistance - Math.min(outer.getLength(), inner.getLength())) < 0.1) {
+                    boolean characterOverlap = false;
+                    int overlappingCount = 0;
+                    for (Character outerCharacter : outer.getCharacters()) {
+                        for (Character innerCharacter : inner.getCharacters()) {
+                            double characterOverlapDistance = outerCharacter.overlappingDistance(innerCharacter);
+                            if (characterOverlapDistance > 2) {
+                                characterOverlap = true;
+                            }
+                            if (characterOverlapDistance > 0) {
+                                overlappingCount++;
+                            }
+                        }
+                    }
+                    if (!characterOverlap && overlappingCount <= 2) {
+                        unionFind.union(outer, inner);
+                    }
+                }
+            });
+        });
+
+        List<Line> outputZone = new ArrayList<>();
+        for (Set<Line> group : unionFind.getGroups()) {
+            List<Character> characters = new ArrayList<>();
+            for (Line line : group) {
+                characters.addAll(line.getCharacters());
+            }
+            characters.sort(Comparator.comparingDouble(Character::getX));
+            outputZone.add(new Line(characters, characterSpacing));
+        }
+        return new Zone(outputZone.stream()
+                .sorted(Comparator.comparing(Line::getY0))
+                .collect(Collectors.toList()));
     }
 }
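mergeLinesInZone above is essentially a union-find pass: every pair of lines that satisfies the distance checks gets union-ed, and each resulting disjoint set is rebuilt into a single Line. A self-contained sketch of that grouping idea with a deliberately simple map-based union-find; the project's UnionFind wraps org.jgrapht.alg.util.UnionFind instead, and the real distance checks are replaced here by hard-coded unions:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class GroupingSketch {

    // Minimal union-find over arbitrary objects (no rank or path compression).
    static class UnionFind<T> {
        private final Map<T, T> parent = new HashMap<>();

        UnionFind(Iterable<T> elements) {
            elements.forEach(e -> parent.put(e, e));
        }

        T find(T e) {
            T root = parent.get(e);
            return root.equals(e) ? e : find(root);
        }

        void union(T a, T b) {
            parent.put(find(a), find(b));
        }

        // Collect the disjoint sets, keyed by their representative element.
        Map<T, List<T>> groups() {
            Map<T, List<T>> groups = new HashMap<>();
            parent.keySet().forEach(e -> groups.computeIfAbsent(find(e), k -> new ArrayList<>()).add(e));
            return groups;
        }
    }

    public static void main(String[] args) {
        List<String> lines = List.of("a", "b", "c", "d");
        UnionFind<String> uf = new UnionFind<>(lines);
        uf.union("a", "b");   // "a" and "b" are close enough to merge
        uf.union("c", "d");   // so are "c" and "d"
        System.out.println(uf.groups().values()); // two groups of two lines each
    }
}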

View File

@@ -1,4 +1,4 @@
-package com.knecon.fforesight.service.layoutparser.processor.services.mapper;
+package com.knecon.fforesight.service.layoutparser.processor.markdown;

 import java.util.ArrayList;
 import java.util.HashSet;
@@ -26,12 +26,12 @@ import org.commonmark.node.StrongEmphasis;
 import org.commonmark.node.Text;
 import org.commonmark.renderer.markdown.MarkdownRenderer;

-import com.iqser.red.service.redaction.v1.server.model.document.AbstractNodeVisitor;
-import com.iqser.red.service.redaction.v1.server.model.document.TextRange;
-import com.iqser.red.service.redaction.v1.server.model.document.nodes.Headline;
-import com.iqser.red.service.redaction.v1.server.model.document.nodes.SemanticNode;
-import com.iqser.red.service.redaction.v1.server.model.document.nodes.Table;
-import com.iqser.red.service.redaction.v1.server.model.document.textblock.TextBlock;
+import com.knecon.fforesight.service.layoutparser.processor.model.graph.AbstractNodeVisitor;
+import com.knecon.fforesight.service.layoutparser.processor.model.graph.TextRange;
+import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Headline;
+import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.SemanticNode;
+import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Table;
+import com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock.TextBlock;
 import com.knecon.fforesight.service.layoutparser.processor.model.text.FontStyle;

 public class MarkdownMapper extends AbstractNodeVisitor {
@@ -39,7 +39,7 @@ public class MarkdownMapper extends AbstractNodeVisitor {
     Document markdownDocument = new Document();

-    public String toMarkdownContent(com.iqser.red.service.redaction.v1.server.model.document.nodes.Document document) {
+    public String toMarkdownContent(com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Document document) {
         visit(document);
@@ -55,7 +55,7 @@ public class MarkdownMapper extends AbstractNodeVisitor {
     @Override
-    public void visit(com.iqser.red.service.redaction.v1.server.model.document.nodes.Paragraph paragraph) {
+    public void visit(com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Paragraph paragraph) {
         markdownDocument.appendChild(parseParagraph(paragraph));
     }
@@ -108,7 +108,7 @@ public class MarkdownMapper extends AbstractNodeVisitor {
     }

-    private Node createTableCell(com.iqser.red.service.redaction.v1.server.model.document.nodes.TableCell tc) {
+    private Node createTableCell(com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.TableCell tc) {
         var cell = new TableCell();
         List<SemanticNode> childNodes = tc.streamChildren()
@@ -122,9 +122,9 @@ public class MarkdownMapper extends AbstractNodeVisitor {
     }

-    private Paragraph parseParagraph(com.iqser.red.service.redaction.v1.server.model.document.nodes.Paragraph paragraph) {
-        Paragraph markdownParagraph = new org.commonmark.node.Paragraph();
+    private Paragraph parseParagraph(com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Paragraph paragraph) {
+        org.commonmark.node.Paragraph markdownParagraph = new org.commonmark.node.Paragraph();
         parseTextBlock(paragraph.getTextBlock(), true).forEach(markdownParagraph::appendChild);
         return markdownParagraph;
     }
@@ -230,12 +230,12 @@ public class MarkdownMapper extends AbstractNodeVisitor {
         int start = textBlock.getTextRange().start();
         int end = textBlock.getTextRange().end();

-        for (TextRange bold : textBlock.getBoldTextRanges()) {
+        for (TextRange bold : textBlock.getBoldTextBoundaries()) {
             styleChanges.computeIfAbsent(bold.start() + start, k -> new HashSet<>()).add(FontStyleChange.enter(FontStyle.BOLD));
             styleChanges.computeIfAbsent(bold.end() + start, k -> new HashSet<>()).add(FontStyleChange.leave(FontStyle.BOLD));
         }
-        for (TextRange italic : textBlock.getItalicTextRanges()) {
+        for (TextRange italic : textBlock.getItalicTextBoundaries()) {
             styleChanges.computeIfAbsent(italic.start() + start, k -> new HashSet<>()).add(FontStyleChange.enter(FontStyle.ITALIC));
             styleChanges.computeIfAbsent(italic.end() + start, k -> new HashSet<>()).add(FontStyleChange.leave(FontStyle.ITALIC));
         }
@@ -298,6 +298,7 @@ public class MarkdownMapper extends AbstractNodeVisitor {
     }

     record FontStyleChange(boolean enter, FontStyle style) {
         public static FontStyleChange enter(FontStyle style) {
@@ -322,5 +323,4 @@ public class MarkdownMapper extends AbstractNodeVisitor {
     record TextRangeWithTextType(TextRange textRange, FontStyle fontStyle) {
     }
 }
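MarkdownMapper assembles an org.commonmark node tree and renders it with MarkdownRenderer. A minimal sketch of that commonmark-java usage in isolation; the delimiter passed to StrongEmphasis and the exact rendered string (delimiters, trailing newline) are assumptions and may vary with the commonmark version on the classpath:

import org.commonmark.node.Document;
import org.commonmark.node.Paragraph;
import org.commonmark.node.StrongEmphasis;
import org.commonmark.node.Text;
import org.commonmark.renderer.markdown.MarkdownRenderer;

public class MarkdownSketch {

    public static void main(String[] args) {
        Document document = new Document();

        Paragraph paragraph = new Paragraph();
        paragraph.appendChild(new Text("Plain text followed by "));

        StrongEmphasis bold = new StrongEmphasis("**");
        bold.appendChild(new Text("bold text"));
        paragraph.appendChild(bold);

        document.appendChild(paragraph);

        // Renders the node tree back to Markdown, e.g. "Plain text followed by **bold text**"
        String markdown = MarkdownRenderer.builder().build().render(document);
        System.out.println(markdown);
    }
}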

View File

@@ -4,7 +4,7 @@ import java.util.HashSet;
 import java.util.Set;

 import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.iqser.red.service.redaction.v1.server.model.document.nodes.LayoutEngine;
+import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.LayoutEngine;
 import com.knecon.fforesight.service.layoutparser.processor.docstrum.model.BoundingBox;
 import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPageBlock;

View File

@@ -4,7 +4,7 @@ import java.util.ArrayList;
 import java.util.List;

 import com.knecon.fforesight.service.layoutparser.processor.model.outline.OutlineObjectTree;
-import com.knecon.fforesight.service.layoutparser.processor.model.outline.SectionTree;
+import com.knecon.fforesight.service.layoutparser.processor.model.outline.TableOfContents;
 import com.knecon.fforesight.service.layoutparser.processor.model.text.StringFrequencyCounter;
 import com.knecon.fforesight.service.layoutparser.processor.model.text.UnclassifiedText;
 import com.knecon.fforesight.service.layoutparser.processor.visualization.LayoutDebugLayer;
@@ -31,6 +31,6 @@ public class ClassificationDocument {
     private long rulesVersion;
     private OutlineObjectTree outlineObjectTree;
-    private SectionTree sectionTree;
+    private TableOfContents tableOfContents;
 }

View File

@ -1,19 +0,0 @@
package com.knecon.fforesight.service.layoutparser.processor.model;
import java.util.Map;
import java.util.stream.Collectors;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.Document;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.NodeType;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.SemanticNode;
import com.knecon.fforesight.service.layoutparser.processor.visualization.LayoutDebugLayer;
public record DocumentWithVisualization(Document document, LayoutDebugLayer layoutDebugLayer) {
public Map<NodeType, Long> buildSemanticNodeCounts() {
return document.streamAllSubNodes()
.collect(Collectors.groupingBy(SemanticNode::getType, Collectors.counting()));
}
}

View File

@@ -1,5 +1,6 @@
 package com.knecon.fforesight.service.layoutparser.processor.model;

+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -12,14 +13,10 @@ import lombok.Getter;
 public class FloatFrequencyCounter {

     Map<Double, Integer> countPerValue = new HashMap<>();
-    boolean changed;
-    Double mostPopularCache;

     public void add(double value) {
-        changed = true;
         if (!countPerValue.containsKey(value)) {
             countPerValue.put(value, 1);
         } else {
@@ -30,8 +27,6 @@ public class FloatFrequencyCounter {
     public void addAll(Map<Double, Integer> otherCounter) {
-        changed = true;
         for (Map.Entry<Double, Integer> entry : otherCounter.entrySet()) {
             if (countPerValue.containsKey(entry.getKey())) {
                 countPerValue.put(entry.getKey(), countPerValue.get(entry.getKey()) + entry.getValue());
@@ -44,27 +39,27 @@ public class FloatFrequencyCounter {
     public Double getMostPopular() {
-        if (changed || mostPopularCache == null) {
-            Map.Entry<Double, Integer> mostPopular = null;
-            for (Map.Entry<Double, Integer> entry : countPerValue.entrySet()) {
-                if (mostPopular == null || entry.getValue() >= mostPopular.getValue()) {
-                    mostPopular = entry;
-                }
-            }
-            mostPopularCache = mostPopular != null ? mostPopular.getKey() : 0;
-            changed = false;
-        }
-        return mostPopularCache;
+        Map.Entry<Double, Integer> mostPopular = null;
+        for (Map.Entry<Double, Integer> entry : countPerValue.entrySet()) {
+            if (mostPopular == null || entry.getValue() >= mostPopular.getValue()) {
+                mostPopular = entry;
+            }
+        }
+        return mostPopular != null ? mostPopular.getKey() : null;
     }

-    public List<Double> getValuesInReverseOrder() {
-        return countPerValue.keySet()
-                .stream()
-                .sorted(Collections.reverseOrder())
-                .collect(Collectors.toList());
+    public List<Double> getHigherThanMostPopular() {
+        Double mostPopular = getMostPopular();
+        List<Double> higher = new ArrayList<>();
+        for (Double value : countPerValue.keySet()) {
+            if (value > mostPopular) {
+                higher.add(value);
+            }
+        }
+        return higher.stream().sorted(Collections.reverseOrder()).collect(Collectors.toList());
     }
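A short usage sketch for the uncached variant of getMostPopular and for getHigherThanMostPopular shown above: the mode is recomputed on every call and is null for an empty counter, while getHigherThanMostPopular returns the values strictly above the mode in descending order. The font-size values below are arbitrary:

import com.knecon.fforesight.service.layoutparser.processor.model.FloatFrequencyCounter;

public class FrequencyCounterSketch {

    public static void main(String[] args) {
        FloatFrequencyCounter fontSizes = new FloatFrequencyCounter();

        // Count a handful of observed font sizes.
        for (double size : new double[] {10.0, 10.0, 10.0, 12.0, 14.0, 14.0}) {
            fontSizes.add(size);
        }

        System.out.println(fontSizes.getMostPopular());           // 10.0 (three occurrences)
        System.out.println(fontSizes.getHigherThanMostPopular()); // [14.0, 12.0], sorted descending
    }
}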

View File

@@ -3,7 +3,7 @@ package com.knecon.fforesight.service.layoutparser.processor.model;
 import java.awt.geom.Rectangle2D;
 import java.util.List;

-import com.knecon.fforesight.service.layoutparser.processor.model.text.Word;
+import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPositionSequence;

 import lombok.AccessLevel;
 import lombok.AllArgsConstructor;
@@ -16,8 +16,8 @@ import lombok.experimental.FieldDefaults;
 public class LineInformation {

     List<Rectangle2D> lineBBox;
-    List<List<Word>> sequencesByLines;
+    List<List<TextPositionSequence>> sequencesByLines;
     List<List<Rectangle2D>> bBoxWithGapsByLines;
-    List<List<List<Word>>> sequencesWithGapsByLines;
+    List<List<List<TextPositionSequence>>> sequencesWithGapsByLines;
 }

View File

@@ -9,14 +9,12 @@ public enum PageBlockType {
     H6,
     HEADER,
     FOOTER,
-    TITLE,
     PARAGRAPH,
     PARAGRAPH_BOLD,
     PARAGRAPH_ITALIC,
     PARAGRAPH_UNKNOWN,
     OTHER,
-    TABLE_OF_CONTENTS_HEADLINE,
-    TABLE_OF_CONTENTS_ITEM,
-    LIST_ITEM,
     TABLE;
@@ -36,7 +34,7 @@ public enum PageBlockType {
     public static int getHeadlineNumber(PageBlockType pageBlockType) {
         return switch (pageBlockType) {
-            case H1, TABLE_OF_CONTENTS_HEADLINE -> 1;
+            case H1 -> 1;
             case H2 -> 2;
             case H3 -> 3;
             case H4 -> 4;
@@ -48,6 +46,6 @@ public enum PageBlockType {
     public boolean isHeadline() {
-        return this.equals(H1) || this.equals(H2) || this.equals(H3) || this.equals(H4) || this.equals(H5) || this.equals(H6) || this.equals(TABLE_OF_CONTENTS_HEADLINE);
+        return this.equals(H1) || this.equals(H2) || this.equals(H3) || this.equals(H4) || this.equals(H5) || this.equals(H6);
     }
 }

View File

@@ -4,7 +4,7 @@ import java.awt.geom.Rectangle2D;
 import java.util.List;

 import com.knecon.fforesight.service.layoutparser.processor.model.table.Ruling;
-import com.knecon.fforesight.service.layoutparser.processor.model.text.Word;
+import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPositionSequence;

 import lombok.AllArgsConstructor;
 import lombok.Builder;
@@ -15,7 +15,7 @@ import lombok.Getter;
 @AllArgsConstructor
 public class PageContents {

-    List<Word> sortedWords;
+    List<TextPositionSequence> sortedTextPositionSequences;
     Rectangle2D cropBox;
     Rectangle2D mediaBox;
     List<Ruling> rulings;

View File

@@ -3,7 +3,6 @@ package com.knecon.fforesight.service.layoutparser.processor.model;
 import java.util.Collections;
 import java.util.LinkedList;
 import java.util.List;
-import java.util.Locale;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
@@ -16,13 +15,11 @@ import lombok.experimental.FieldDefaults;
 @FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
 public class SectionIdentifier {

-    public static Pattern numericalIdentifierPattern = Pattern.compile("^[\\s]?(\\d{1,2})(?:[\\s.,;](\\d{1,2}))?(?:[\\s.,;](\\d{1,2}))?(?:[\\s.,;](\\d{1,2}))?");
-    public static Pattern alphanumericIdentifierPattern = Pattern.compile("^[\\s]?[A-Za-z][\\s.,;]?(\\d{1,2})[\\s.,;]?(\\d{1,2})?[\\s.,;]?(\\d{1,2})?[\\s.,;]?(\\d{1,2})?[\\s.,;]?");
+    public static Pattern numericalIdentifierPattern = Pattern.compile("^[\\s]?(\\d+)[\\s.,;]?(\\d+)?[\\s.,;]?(\\d+)?[\\s.,;]?(\\d+)?");

     public enum Format {
         EMPTY,
         NUMERICAL,
-        ALPHANUMERIC,
         DOCUMENT
     }
@@ -44,10 +41,6 @@ public class SectionIdentifier {
         if (numericalIdentifierMatcher.find()) {
             return buildNumericalSectionIdentifier(headline, numericalIdentifierMatcher);
         }
-        Matcher alphanumericIdentifierMatcher = alphanumericIdentifierPattern.matcher(headline);
-        if (alphanumericIdentifierMatcher.find()) {
-            return buildAlphanumericSectionIdentifier(headline, alphanumericIdentifierMatcher);
-        }
         // more formats here
         return SectionIdentifier.empty();
     }
@@ -82,36 +75,7 @@ public class SectionIdentifier {
             }
             identifiers.add(Integer.parseInt(numericalIdentifier.trim()));
         }
-        return new SectionIdentifier(Format.NUMERICAL,
-                identifierString,
-                identifiers.stream()
-                        .toList(),
-                false);
-    }
-
-    private static SectionIdentifier buildAlphanumericSectionIdentifier(String headline, Matcher alphanumericIdentifierMatcher) {
-        String identifierString = headline.substring(alphanumericIdentifierMatcher.start(), alphanumericIdentifierMatcher.end());
-        String alphanumericIdentifier = alphanumericIdentifierMatcher.group(0).substring(0, 1).toUpperCase(Locale.ENGLISH);
-        int mappedCharacterValue = alphanumericIdentifier.charAt(0) - 'A' + 1;
-        List<Integer> identifiers = new LinkedList<>();
-        identifiers.add(mappedCharacterValue);
-        for (int i = 1; i <= 3; i++) {
-            String numericalIdentifier = alphanumericIdentifierMatcher.group(i);
-            if (numericalIdentifier == null || numericalIdentifier.equals("0") || numericalIdentifier.isEmpty() || numericalIdentifier.isBlank()) {
-                break;
-            }
-            identifiers.add(Integer.parseInt(numericalIdentifier.trim()));
-        }
-        return new SectionIdentifier(Format.ALPHANUMERIC,
-                identifierString,
-                identifiers.stream()
-                        .toList(),
-                false);
+        return new SectionIdentifier(Format.NUMERICAL, identifierString, identifiers.stream().toList(), false);
     }
@@ -159,22 +123,4 @@ public class SectionIdentifier {
         return identifierString;
     }

-    public boolean isEmpty() {
-        return this.format.equals(Format.EMPTY);
-    }
-
-    public int level() {
-        return identifiers.size();
-    }
-
-    protected List<Integer> getIdentifiers() {
-        return identifiers;
-    }
 }
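The simpler numericalIdentifierPattern above (the \d+ variant without the {1,2} bounds) accepts up to four separator-delimited numeric groups at the start of a headline. A standalone check of that pattern against a typical numbered headline; the pattern string is copied from the diff and the group-collection loop mirrors buildNumericalSectionIdentifier:

import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class SectionIdentifierSketch {

    static final Pattern NUMERICAL = Pattern.compile("^[\\s]?(\\d+)[\\s.,;]?(\\d+)?[\\s.,;]?(\\d+)?[\\s.,;]?(\\d+)?");

    public static void main(String[] args) {
        Matcher matcher = NUMERICAL.matcher("3.1.2 Results and discussion");
        if (matcher.find()) {
            List<Integer> identifiers = new ArrayList<>();
            for (int i = 1; i <= matcher.groupCount(); i++) {
                String group = matcher.group(i);
                if (group == null || group.isBlank()) {
                    break;
                }
                identifiers.add(Integer.parseInt(group.trim()));
            }
            System.out.println(identifiers); // [3, 1, 2]
        }
    }
}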

View File

@ -0,0 +1,94 @@
package com.knecon.fforesight.service.layoutparser.processor.model.graph;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Document;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Footer;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Header;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Headline;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Image;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Paragraph;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Section;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.SemanticNode;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.SuperSection;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Table;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.TableCell;
public abstract class AbstractNodeVisitor implements NodeVisitor {
@Override
public void visit(Document document) {
visitChildren(document);
}
@Override
public void visit(SuperSection superSection) {
visitChildren(superSection);
}
@Override
public void visit(Section section) {
visitChildren(section);
}
@Override
public void visit(Headline headline) {
visitChildren(headline);
}
@Override
public void visit(Paragraph paragraph) {
visitChildren(paragraph);
}
@Override
public void visit(Footer footer) {
visitChildren(footer);
}
@Override
public void visit(Header header) {
visitChildren(header);
}
@Override
public void visit(Image image) {
visitChildren(image);
}
@Override
public void visit(Table table) {
visitChildren(table);
}
@Override
public void visit(TableCell tableCell) {
visitChildren(tableCell);
}
private void visitChildren(SemanticNode semanticNode) {
semanticNode.streamChildren()
.forEach(node -> node.accept(this));
}
}
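AbstractNodeVisitor gives every visit method a default depth-first descent, so a concrete visitor only overrides the node types it cares about. A minimal sketch of such a visitor that counts Paragraph nodes; it assumes the node classes dispatch back through accept(visitor), as the visitChildren implementation above and the MarkdownMapper usage elsewhere in this change suggest:

import com.knecon.fforesight.service.layoutparser.processor.model.graph.AbstractNodeVisitor;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Paragraph;

// Counts paragraphs anywhere in the document tree; all other node types
// fall through to the inherited depth-first traversal.
public class ParagraphCounter extends AbstractNodeVisitor {

    private int count;

    @Override
    public void visit(Paragraph paragraph) {
        count++;
        super.visit(paragraph); // keep descending in case the paragraph has child nodes
    }

    public int getCount() {
        return count;
    }
}

A caller would instantiate the visitor, call visitor.visit(document) on the root Document (the same entry point MarkdownMapper.toMarkdownContent uses) and then read getCount().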

View File

@ -0,0 +1,230 @@
package com.knecon.fforesight.service.layoutparser.processor.model.graph;
import static java.lang.String.format;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.stream.Stream;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.NodeType;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Document;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.GenericSemanticNode;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.SemanticNode;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Table;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.TableCell;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock.TextBlock;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock.TextBlockCollector;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.experimental.FieldDefaults;
@Data
@EqualsAndHashCode
public class DocumentTree {
private final Entry root;
public DocumentTree(Document document) {
root = Entry.builder().treeId(Collections.emptyList()).children(new LinkedList<>()).node(document).build();
}
public TextBlock buildTextBlock() {
return allEntriesInOrder().map(Entry::getNode)
.filter(SemanticNode::isLeaf)
.map(SemanticNode::getLeafTextBlock)
.collect(new TextBlockCollector());
}
public List<Integer> createNewMainEntryAndReturnId(GenericSemanticNode node) {
return createNewChildEntryAndReturnIdImpl(Collections.emptyList(), node);
}
public List<Integer> createNewChildEntryAndReturnId(GenericSemanticNode parentNode, GenericSemanticNode node) {
return createNewChildEntryAndReturnIdImpl(parentNode.getTreeId(), node);
}
public List<Integer> createNewChildEntryAndReturnId(GenericSemanticNode parentNode, Table node) {
return createNewChildEntryAndReturnIdImpl(parentNode.getTreeId(), node);
}
public List<Integer> createNewTableChildEntryAndReturnId(Table parentTable, TableCell tableCell) {
return createNewChildEntryAndReturnIdImpl(parentTable.getTreeId(), tableCell);
}
@SuppressWarnings("PMD.UnusedPrivateMethod") // PMD actually flags this wrong
private List<Integer> createNewChildEntryAndReturnIdImpl(List<Integer> parentId, SemanticNode node) {
if (!entryExists(parentId)) {
throw new IllegalArgumentException(format("parentId %s does not exist!", parentId));
}
Entry parent = getEntryById(parentId);
List<Integer> newId = new LinkedList<>(parentId);
newId.add(parent.children.size());
parent.children.add(Entry.builder().treeId(newId).node(node).build());
return newId;
}
private boolean entryExists(List<Integer> treeId) {
if (treeId.isEmpty()) {
return root != null;
}
Entry entry = root.children.get(treeId.get(0));
for (int id : treeId.subList(1, treeId.size())) {
if (id >= entry.children.size() || 0 > id) {
return false;
}
entry = entry.children.get(id);
}
return true;
}
public Entry getParentEntryById(List<Integer> treeId) {
return getEntryById(getParentId(treeId));
}
public boolean hasParentById(List<Integer> treeId) {
return !treeId.isEmpty();
}
public Stream<SemanticNode> childNodes(List<Integer> treeId) {
return getEntryById(treeId).children.stream()
.map(Entry::getNode);
}
public Stream<SemanticNode> childNodesOfType(List<Integer> treeId, NodeType nodeType) {
return getEntryById(treeId).children.stream()
.filter(entry -> entry.node.getType().equals(nodeType))
.map(Entry::getNode);
}
private static List<Integer> getParentId(List<Integer> treeId) {
if (treeId.isEmpty()) {
throw new UnsupportedOperationException("Root has no parent!");
}
if (treeId.size() < 2) {
return Collections.emptyList();
}
return treeId.subList(0, treeId.size() - 1);
}
public Entry getEntryById(List<Integer> treeId) {
if (treeId.isEmpty()) {
return root;
}
Entry entry = root;
for (int id : treeId) {
entry = entry.children.get(id);
}
return entry;
}
public Stream<Entry> mainEntries() {
return root.children.stream();
}
public Stream<Entry> allEntriesInOrder() {
return Stream.of(root)
.flatMap(DocumentTree::flatten);
}
public Stream<Entry> allSubEntriesInOrder(List<Integer> parentId) {
return getEntryById(parentId).children.stream()
.flatMap(DocumentTree::flatten);
}
@Override
public String toString() {
return String.join("\n",
allEntriesInOrder().map(Entry::toString)
.toList());
}
private static Stream<Entry> flatten(Entry entry) {
return Stream.concat(Stream.of(entry),
entry.children.stream()
.flatMap(DocumentTree::flatten));
}
public SemanticNode getHighestParentById(List<Integer> treeId) {
if (treeId.isEmpty()) {
return root.node;
}
return root.children.get(treeId.get(0)).node;
}
@Builder
@Getter
@AllArgsConstructor
@FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true)
public static class Entry {
List<Integer> treeId;
SemanticNode node;
@Builder.Default
List<Entry> children = new LinkedList<>();
@Override
public String toString() {
return node.toString();
}
public NodeType getType() {
return node.getType();
}
}
}
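allEntriesInOrder and allSubEntriesInOrder flatten the tree depth-first via the recursive flatten helper, emitting each entry before its children. The same Stream.concat pattern on a minimal stand-in record, independent of DocumentTree.Entry:

import java.util.List;
import java.util.stream.Stream;

public class FlattenSketch {

    // Minimal stand-in for DocumentTree.Entry: a label and a list of children.
    record Node(String label, List<Node> children) {}

    // Parent first, then all descendants, depth-first -- the same shape as DocumentTree.flatten.
    static Stream<Node> flatten(Node node) {
        return Stream.concat(Stream.of(node), node.children().stream().flatMap(FlattenSketch::flatten));
    }

    public static void main(String[] args) {
        Node root = new Node("document", List.of(
                new Node("section 1", List.of(new Node("paragraph 1.1", List.of()))),
                new Node("section 2", List.of())));

        flatten(root).map(Node::label).forEach(System.out::println);
        // document, section 1, paragraph 1.1, section 2
    }
}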

View File

@ -0,0 +1,45 @@
package com.knecon.fforesight.service.layoutparser.processor.model.graph;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Document;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Footer;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Header;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Headline;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Image;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Paragraph;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Section;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.SuperSection;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Table;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.TableCell;
public interface NodeVisitor {
void visit(Document document);
void visit(SuperSection superSection);
void visit(Section section);
void visit(Headline headline);
void visit(Paragraph paragraph);
void visit(Footer footer);
void visit(Header header);
void visit(Image image);
void visit(Table table);
void visit(TableCell tableCell);
}

View File

@ -0,0 +1,164 @@
package com.knecon.fforesight.service.layoutparser.processor.model.graph;
import static java.lang.String.format;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.stream.IntStream;
import lombok.EqualsAndHashCode;
import lombok.Setter;
@Setter
@EqualsAndHashCode
@SuppressWarnings("PMD.AvoidFieldNameMatchingMethodName")
public class TextRange implements Comparable<TextRange> {
private int start;
private int end;
public TextRange(int start, int end) {
if (start > end) {
throw new IllegalArgumentException(format("start: %d > end: %d", start, end));
}
this.start = start;
this.end = end;
}
public int length() {
return end - start;
}
public int start() {
return start;
}
public int end() {
return end;
}
public boolean contains(TextRange textRange) {
return start <= textRange.start() && textRange.end() <= end;
}
public boolean containedBy(TextRange textRange) {
return textRange.contains(this);
}
public boolean contains(int start, int end) {
if (start > end) {
throw new IllegalArgumentException(format("start: %d > end: %d", start, end));
}
return this.start <= start && end <= this.end;
}
public boolean containedBy(int start, int end) {
if (start > end) {
throw new IllegalArgumentException(format("start: %d > end: %d", start, end));
}
return start <= this.start && this.end <= end;
}
public boolean contains(int index) {
return start <= index && index <= end;
}
public boolean containsExclusive(int index) {
return start <= index && index < end;
}
public boolean intersects(TextRange textRange) {
return textRange.start() < this.end && this.start < textRange.end();
}
public List<TextRange> split(List<Integer> splitIndices) {
if (splitIndices.stream()
.anyMatch(idx -> !this.containsExclusive(idx))) {
throw new IndexOutOfBoundsException(format("%s splitting indices are out of range for %s",
splitIndices.stream()
.filter(idx -> !this.containsExclusive(idx))
.toList(),
this));
}
List<TextRange> splitBoundaries = new LinkedList<>();
int previousIndex = start;
for (int splitIndex : splitIndices) {
// skip split if it would produce a boundary of length 0
if (splitIndex == previousIndex) {
continue;
}
splitBoundaries.add(new TextRange(previousIndex, splitIndex));
previousIndex = splitIndex;
}
splitBoundaries.add(new TextRange(previousIndex, end));
return splitBoundaries;
}
public IntStream intStream() {
return IntStream.range(start, end);
}
public static TextRange merge(Collection<TextRange> boundaries) {
int minStart = boundaries.stream()
.mapToInt(TextRange::start)
.min()
.orElseThrow(IllegalArgumentException::new);
int maxEnd = boundaries.stream()
.mapToInt(TextRange::end)
.max()
.orElseThrow(IllegalArgumentException::new);
return new TextRange(minStart, maxEnd);
}
@Override
public String toString() {
return format("Boundary [%d|%d)", start, end);
}
@Override
public int compareTo(TextRange textRange) {
if (end < textRange.end() && start < textRange.start()) {
return -1;
}
if (start > textRange.start() && end > textRange.end()) {
return 1;
}
return 0;
}
}
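A short usage sketch for the TextRange operations defined above, exercising split, merge, containment and intersection on arbitrary character offsets:

import java.util.List;

import com.knecon.fforesight.service.layoutparser.processor.model.graph.TextRange;

public class TextRangeSketch {

    public static void main(String[] args) {
        TextRange range = new TextRange(0, 20);

        // Split [0|20) at offsets 5 and 12 -> [0|5), [5|12), [12|20)
        List<TextRange> parts = range.split(List.of(5, 12));
        parts.forEach(System.out::println);

        // Merging the parts restores the original boundary.
        System.out.println(TextRange.merge(parts)); // Boundary [0|20)

        // Containment and intersection checks.
        System.out.println(range.contains(new TextRange(5, 12)));    // true
        System.out.println(range.intersects(new TextRange(18, 25))); // true
    }
}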

View File

@ -0,0 +1,8 @@
package com.knecon.fforesight.service.layoutparser.processor.model.graph.entity;
public enum EntityType {
ENTITY,
RECOMMENDATION,
FALSE_POSITIVE,
FALSE_RECOMMENDATION
}

View File

@ -0,0 +1,24 @@
package com.knecon.fforesight.service.layoutparser.processor.model.graph.entity;
import java.awt.geom.Rectangle2D;
import java.util.List;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Page;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.experimental.FieldDefaults;
@Data
@AllArgsConstructor
@FieldDefaults(level = AccessLevel.PRIVATE)
public class RedactionPosition {
final String id;
Page page;
// Each entry in this list corresponds to an entry in the redaction log, this means:
// An entity might be represented by multiple redaction log entries
List<Rectangle2D> rectanglePerLine;
}

View File

@ -0,0 +1,228 @@
package com.knecon.fforesight.service.layoutparser.processor.model.graph.entity;
import java.awt.geom.Rectangle2D;
import java.util.Collection;
import java.util.Comparator;
import java.util.Deque;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.iqser.red.service.persistence.service.v1.api.shared.model.redactionlog.Engine;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.TextRange;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Page;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.SemanticNode;
import com.knecon.fforesight.service.layoutparser.processor.utils.IdBuilder;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.FieldDefaults;
@Data
@Builder
@AllArgsConstructor
@FieldDefaults(level = AccessLevel.PRIVATE)
@EqualsAndHashCode(onlyExplicitlyIncluded = true)
public class TextEntity {
// initial values
@EqualsAndHashCode.Include
final TextRange textRange;
@EqualsAndHashCode.Include
final String type;
@EqualsAndHashCode.Include
final EntityType entityType;
// empty defaults
boolean redaction;
boolean removed;
boolean ignored;
boolean resized;
boolean skipRemoveEntitiesContainedInLarger;
boolean dictionaryEntry;
boolean dossierDictionaryEntry;
Set<Engine> engines;
Set<TextEntity> references;
@Builder.Default
Deque<Integer> matchedRules = new LinkedList<>();
String redactionReason;
String legalBasis;
// inferred on graph insertion
@EqualsAndHashCode.Include
String value;
String textBefore;
String textAfter;
@Builder.Default
Set<Page> pages = new HashSet<>();
List<RedactionPosition> redactionPositionsPerPage;
@Builder.Default
List<SemanticNode> intersectingNodes = new LinkedList<>();
SemanticNode deepestFullyContainingNode;
public static TextEntity initialEntityNode(TextRange textRange, String type, EntityType entityType) {
return TextEntity.builder().type(type).entityType(entityType).textRange(textRange).engines(new HashSet<>()).references(new HashSet<>()).build();
}
public boolean occursInNodeOfType(Class<? extends SemanticNode> clazz) {
return intersectingNodes.stream().anyMatch(clazz::isInstance);
}
public boolean occursInNode(SemanticNode semanticNode) {
return intersectingNodes.stream().anyMatch(node -> node.equals(semanticNode));
}
public boolean isType(String type) {
return this.type.equals(type);
}
public boolean isAnyType(List<String> types) {
return types.contains(type);
}
public void addIntersectingNode(SemanticNode containingNode) {
intersectingNodes.add(containingNode);
}
public void removeFromGraph() {
intersectingNodes.forEach(node -> node.getEntities().remove(this));
pages.forEach(page -> page.getEntities().remove(this));
intersectingNodes = new LinkedList<>();
deepestFullyContainingNode = null;
pages = new HashSet<>();
removed = true;
ignored = true;
}
public void addMatchedRule(int ruleNumber) {
matchedRules.add(ruleNumber);
}
public int getMatchedRule() {
if (matchedRules.isEmpty()) {
return 0;
}
return matchedRules.getLast();
}
public List<RedactionPosition> getRedactionPositionsPerPage() {
if (redactionPositionsPerPage == null || redactionPositionsPerPage.isEmpty()) {
Map<Page, List<Rectangle2D>> rectanglesPerLinePerPage = deepestFullyContainingNode.getTextBlock().getPositionsPerPage(textRange);
Page firstPage = rectanglesPerLinePerPage.keySet()
.stream()
.min(Comparator.comparingInt(Page::getNumber))
.orElseThrow(() -> new RuntimeException("No Positions found on any page!"));
String id = IdBuilder.buildId(pages, rectanglesPerLinePerPage.values().stream().flatMap(Collection::stream).toList());
redactionPositionsPerPage = rectanglesPerLinePerPage.entrySet().stream().map(entry -> buildRedactionPosition(firstPage, id, entry)).toList();
}
return redactionPositionsPerPage;
}
private static RedactionPosition buildRedactionPosition(Page firstPage, String id, Map.Entry<Page, List<Rectangle2D>> entry) {
if (entry.getKey().equals(firstPage)) {
return new RedactionPosition(id, entry.getKey(), entry.getValue());
} else {
return new RedactionPosition(id + "-" + entry.getKey().getNumber(), entry.getKey(), entry.getValue());
}
}
public boolean containedBy(TextEntity textEntity) {
return this.textRange.containedBy(textEntity.getTextRange());
}
public boolean contains(TextEntity textEntity) {
return this.textRange.contains(textEntity.getTextRange());
}
public boolean intersects(TextEntity textEntity) {
return this.textRange.intersects(textEntity.getTextRange());
}
public void addEngine(Engine engine) {
engines.add(engine);
}
public void addEngines(Set<Engine> engines) {
this.engines.addAll(engines);
}
public void addReference(TextEntity reference) {
references.add(reference);
}
public void addReferences(List<TextEntity> references) {
this.references.addAll(references);
}
public boolean matchesAnnotationId(String manualRedactionId) {
return getRedactionPositionsPerPage().stream().anyMatch(entityPosition -> entityPosition.getId().equals(manualRedactionId));
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("Entity[\"");
sb.append(value);
sb.append("\", ");
sb.append(textRange);
sb.append(", pages[");
pages.forEach(page -> {
sb.append(page.getNumber());
sb.append(", ");
});
sb.delete(sb.length() - 2, sb.length());
sb.append("], type = \"");
sb.append(type);
sb.append("\", EntityType.");
sb.append(entityType);
sb.append("]");
return sb.toString();
}
}
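A brief sketch of building entities with the initialEntityNode factory above and using the TextRange-backed relations; the entity type strings are invented for the example:

import com.knecon.fforesight.service.layoutparser.processor.model.graph.TextRange;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.entity.EntityType;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.entity.TextEntity;

public class TextEntitySketch {

    public static void main(String[] args) {
        // A detected entity covering offsets [100|120) and a smaller candidate inside it.
        TextEntity person = TextEntity.initialEntityNode(new TextRange(100, 120), "PERSON", EntityType.ENTITY);
        TextEntity initials = TextEntity.initialEntityNode(new TextRange(105, 110), "INITIALS", EntityType.RECOMMENDATION);

        System.out.println(person.contains(initials));     // true
        System.out.println(initials.containedBy(person));  // true
        System.out.println(person.intersects(initials));   // true

        System.out.println(person.isType("PERSON"));       // true
        System.out.println(initials.isAnyType(java.util.List.of("PERSON", "ORG"))); // false
    }
}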

View File

@ -0,0 +1,74 @@
package com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes;
import java.awt.geom.Rectangle2D;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.LayoutEngine;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.DocumentTree;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.entity.TextEntity;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock.TextBlock;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import lombok.experimental.FieldDefaults;
import lombok.experimental.SuperBuilder;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Data
@SuperBuilder
@AllArgsConstructor
@NoArgsConstructor
@FieldDefaults(level = AccessLevel.PRIVATE)
public abstract class AbstractSemanticNode implements GenericSemanticNode {
@Builder.Default
Set<LayoutEngine> engines = new HashSet<>(Set.of(LayoutEngine.ALGORITHM));
List<Integer> treeId;
TextBlock textBlock;
@EqualsAndHashCode.Exclude
DocumentTree documentTree;
@Builder.Default
@EqualsAndHashCode.Exclude
Set<TextEntity> entities = new HashSet<>();
@EqualsAndHashCode.Exclude
Map<Page, Rectangle2D> bBoxCache;
@Override
public TextBlock getTextBlock() {
if (textBlock == null) {
textBlock = GenericSemanticNode.super.getTextBlock();
}
return textBlock;
}
@Override
public String toString() {
return treeId.toString() + ": " + getType() + ": " + this.getTextBlock().buildSummary();
}
@Override
public Map<Page, Rectangle2D> getBBox() {
if (bBoxCache == null) {
bBoxCache = GenericSemanticNode.super.getBBox();
}
return bBoxCache;
}
}

View File

@ -0,0 +1,173 @@
package com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes;
import java.awt.geom.Rectangle2D;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.NodeType;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.DocumentTree;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.NodeVisitor;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock.TextBlock;
import com.knecon.fforesight.service.layoutparser.processor.visualization.LayoutDebugLayer;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import lombok.experimental.FieldDefaults;
import lombok.experimental.SuperBuilder;
@Data
@SuperBuilder
@AllArgsConstructor
@NoArgsConstructor
@EqualsAndHashCode(callSuper = true)
@FieldDefaults(level = AccessLevel.PRIVATE)
public class Document extends AbstractSemanticNode {
Set<Page> pages;
Integer numberOfPages;
LayoutDebugLayer layoutDebugLayer;
@Override
public NodeType getType() {
return NodeType.DOCUMENT;
}
/**
* Gets the sections of the document as a list.
*
* @return A list of all sections within the document.
*/
public List<Section> getAllSections() {
return streamAllSubNodesOfType(NodeType.SECTION).map(node -> (Section) node)
.collect(Collectors.toList());
}
/**
* Gets the main sections of the document as a list.
*
* @return A list of main sections within the document
* @deprecated This method is marked for removal.
* Use {@link #streamChildrenOfType(NodeType)} instead,
* or {@link #getChildrenOfTypeSectionOrSuperSection()} which returns children of type SECTION as well as SUPER_SECTION.
*/
@Deprecated(forRemoval = true)
public List<Section> getMainSections() {
return streamChildrenOfType(NodeType.SECTION).map(node -> (Section) node)
.collect(Collectors.toList());
}
/**
* Gets the direct children of type SECTION or SUPER_SECTION of the document as a list of SemanticNode objects.
*
* @return A list of all children of type SECTION or SUPER_SECTION.
*/
public List<SemanticNode> getChildrenOfTypeSectionOrSuperSection() {
return streamChildren().filter(semanticNode -> semanticNode.getType().equals(NodeType.SECTION) || semanticNode.getType().equals(NodeType.SUPER_SECTION))
.toList();
}
public List<Header> getHeaders() {
return streamChildrenOfType(NodeType.HEADER).map(node -> (Header) node)
.collect(Collectors.toList());
}
public List<Footer> getFooters() {
return streamChildrenOfType(NodeType.FOOTER).map(node -> (Footer) node)
.collect(Collectors.toList());
}
@Override
public Headline getHeadline() {
return streamAllSubNodesOfType(NodeType.HEADLINE).map(node -> (Headline) node)
.findFirst().orElse(Headline.builder().build());
}
public Stream<TextBlock> streamTerminalTextBlocksInOrder() {
return streamAllNodes().filter(SemanticNode::isLeaf)
.map(SemanticNode::getTextBlock);
}
@Override
public List<Integer> getTreeId() {
return Collections.emptyList();
}
@Override
public void setTreeId(List<Integer> tocId) {
throw new UnsupportedOperationException("Document is always the root of the TablePageBlock of Contents");
}
private Stream<SemanticNode> streamAllNodes() {
return getDocumentTree().allEntriesInOrder()
.map(DocumentTree.Entry::getNode);
}
public Stream<Image> streamAllImages() {
return streamAllSubNodesOfType(NodeType.IMAGE).map(node -> (Image) node);
}
public Map<NodeType, Long> buildSemanticNodeCounts() {
return streamAllSubNodes().collect(Collectors.groupingBy(SemanticNode::getType, Collectors.counting()));
}
@Override
public String toString() {
return NodeType.DOCUMENT + ": " + this.getTextBlock().buildSummary();
}
@Override
public Map<Page, Rectangle2D> getBBox() {
Map<Page, Rectangle2D> bBox = new HashMap<>();
for (Page page : pages) {
bBox.put(page, new Rectangle2D.Double(0, 0, page.getWidth(), page.getHeight()));
}
return bBox;
}
@Override
public void accept(NodeVisitor visitor) {
visitor.visit(this);
}
}
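
A minimal sketch of how a parsed Document could be inspected with the helpers above; the document variable is assumed to come from the layout-parsing pipeline and the variable names are illustrative.

Map<NodeType, Long> counts = document.buildSemanticNodeCounts();
long tableCount = counts.getOrDefault(NodeType.TABLE, 0L);
List<Section> sections = document.getAllSections();
// concatenate all leaf text in reading order
String fullText = document.streamTerminalTextBlocksInOrder()
        .map(TextBlock::getSearchText)
        .collect(Collectors.joining("\n"));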

View File

@ -0,0 +1,35 @@
package com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes;
import java.util.stream.Stream;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock.TextBlock;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock.TextBlockCollector;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.SuperBuilder;
@Data
@EqualsAndHashCode(callSuper = true)
@SuperBuilder
public class DuplicatedParagraph extends Paragraph {
TextBlock unsortedLeafTextBlock;
@Override
public TextBlock getTextBlock() {
return Stream.of(super.getLeafTextBlock(), unsortedLeafTextBlock)
.collect(new TextBlockCollector());
}
@Override
public String toString() {
return super.toString();
}
}

View File

@ -0,0 +1,57 @@
package com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.NodeType;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.NodeVisitor;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock.TextBlock;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.FieldDefaults;
import lombok.experimental.SuperBuilder;
@Data
@SuperBuilder
@AllArgsConstructor
@EqualsAndHashCode(callSuper = true)
@FieldDefaults(level = AccessLevel.PRIVATE)
public class Footer extends AbstractSemanticNode {
TextBlock leafTextBlock;
@Override
public NodeType getType() {
return NodeType.FOOTER;
}
@Override
public boolean isLeaf() {
return true;
}
@Override
public void accept(NodeVisitor visitor) {
visitor.visit(this);
}
@Override
public TextBlock getTextBlock() {
return leafTextBlock;
}
@Override
public String toString() {
return getTreeId() + ": " + NodeType.FOOTER + ": " + leafTextBlock.buildSummary();
}
}

View File

@ -0,0 +1,5 @@
package com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes;
public interface GenericSemanticNode extends SemanticNode {
}

View File

@ -0,0 +1,58 @@
package com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.NodeType;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.NodeVisitor;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock.TextBlock;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.FieldDefaults;
import lombok.experimental.SuperBuilder;
@Data
@SuperBuilder
@AllArgsConstructor
@EqualsAndHashCode(callSuper = true)
@FieldDefaults(level = AccessLevel.PRIVATE)
public class Header extends AbstractSemanticNode {
TextBlock leafTextBlock;
@Override
public boolean isLeaf() {
return true;
}
@Override
public void accept(NodeVisitor visitor) {
visitor.visit(this);
}
@Override
public NodeType getType() {
return NodeType.HEADER;
}
@Override
public TextBlock getTextBlock() {
return leafTextBlock;
}
@Override
public String toString() {
return getTreeId() + ": " + NodeType.HEADER + ": " + leafTextBlock.buildSummary();
}
}

View File

@ -0,0 +1,65 @@
package com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.NodeType;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.NodeVisitor;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock.TextBlock;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.FieldDefaults;
import lombok.experimental.SuperBuilder;
@Data
@SuperBuilder
@AllArgsConstructor
@EqualsAndHashCode(callSuper = true)
@FieldDefaults(level = AccessLevel.PRIVATE)
public class Headline extends AbstractSemanticNode {
TextBlock leafTextBlock;
@Override
public NodeType getType() {
return NodeType.HEADLINE;
}
@Override
public boolean isLeaf() {
return true;
}
@Override
public void accept(NodeVisitor visitor) {
visitor.visit(this);
}
@Override
public TextBlock getTextBlock() {
return leafTextBlock;
}
@Override
public String toString() {
return getTreeId() + ": " + NodeType.HEADLINE + ": " + leafTextBlock.buildSummary();
}
@Override
public Headline getHeadline() {
return this;
}
}

View File

@ -0,0 +1,115 @@
package com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes;
import java.awt.geom.Rectangle2D;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.NodeType;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.NodeVisitor;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock.TextBlock;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import lombok.experimental.FieldDefaults;
import lombok.experimental.SuperBuilder;
@Data
@SuperBuilder
@AllArgsConstructor
@NoArgsConstructor
@EqualsAndHashCode(callSuper = true)
@FieldDefaults(level = AccessLevel.PRIVATE)
public class Image extends AbstractSemanticNode {
String id;
String representationHash;
ImageType imageType;
boolean transparent;
Rectangle2D position;
TextBlock leafTextBlock;
boolean redaction;
boolean ignored;
@Builder.Default
String redactionReason = "";
@Builder.Default
String legalBasis = "";
@Builder.Default
int matchedRule = -1;
@EqualsAndHashCode.Exclude
Page page;
@Override
public NodeType getType() {
return NodeType.IMAGE;
}
@Override
public TextBlock getTextBlock() {
return leafTextBlock;
}
@Override
public Set<Page> getPages() {
return Collections.singleton(page);
}
@Override
public String toString() {
return getTreeId() + ": " + NodeType.IMAGE + ": " + imageType.toString() + " " + position;
}
@Override
public Map<Page, Rectangle2D> getBBox() {
Map<Page, Rectangle2D> bBoxPerPage = new HashMap<>();
bBoxPerPage.put(page, position);
return bBoxPerPage;
}
@Override
public void accept(NodeVisitor visitor) {
visitor.visit(this);
}
@Override
public boolean isLeaf() {
return true;
}
public double getArea() {
return position.getWidth() * position.getHeight();
}
public boolean isFullPageImage() {
return imageType.equals(ImageType.OCR) || getArea() >= 0.5 * page.getArea();
}
}
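
A minimal sketch of how Image nodes could be flagged for redaction through the Lombok-generated setters; the document variable and the reason string are illustrative assumptions.

document.streamAllImages()
        .filter(image -> image.getImageType() == ImageType.SIGNATURE)
        .forEach(image -> {
            image.setRedaction(true);
            image.setRedactionReason("signature");
        });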

View File

@ -0,0 +1,26 @@
package com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes;
import java.util.Locale;
public enum ImageType {
LOGO,
FORMULA,
SIGNATURE,
SIGNATURE_VISUAL,
OTHER,
OCR,
GRAPHIC;
public static ImageType fromString(String imageType) {
return switch (imageType.toLowerCase(Locale.ROOT)) {
case "logo" -> ImageType.LOGO;
case "formula" -> ImageType.FORMULA;
case "signature" -> ImageType.SIGNATURE;
case "ocr" -> ImageType.OCR;
case "graphic" -> ImageType.GRAPHIC;
default -> ImageType.OTHER;
};
}
}
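
A short sketch of the fromString mapping: matching is case-insensitive and any unknown label falls back to OTHER; the input strings are illustrative.

ImageType logo = ImageType.fromString("Logo");     // -> LOGO
ImageType other = ImageType.fromString("diagram"); // -> OTHER (no dedicated constant)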

View File

@ -0,0 +1,96 @@
package com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import com.knecon.fforesight.service.layoutparser.processor.model.ClassificationPage;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.entity.TextEntity;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock.TextBlock;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock.TextBlockCollector;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.Setter;
import lombok.experimental.FieldDefaults;
@Getter
@Setter
@Builder
@AllArgsConstructor
@FieldDefaults(level = AccessLevel.PRIVATE)
public class Page {
Integer number;
Integer height;
Integer width;
Integer rotation;
@EqualsAndHashCode.Exclude
List<SemanticNode> mainBody;
@EqualsAndHashCode.Exclude
Header header;
@EqualsAndHashCode.Exclude
Footer footer;
@Builder.Default
@EqualsAndHashCode.Exclude
Set<TextEntity> entities = new HashSet<>();
@Builder.Default
@EqualsAndHashCode.Exclude
Set<Image> images = new HashSet<>();
public static Page fromClassificationPage(ClassificationPage classificationPage) {
return Page.builder()
.height((int) classificationPage.getPageHeight())
.width((int) classificationPage.getPageWidth())
.number(classificationPage.getPageNumber())
.rotation(classificationPage.getRotation())
.mainBody(new LinkedList<>())
.build();
}
public TextBlock getMainBodyTextBlock() {
return mainBody.stream()
.filter(SemanticNode::isLeaf)
.map(SemanticNode::getLeafTextBlock)
.collect(new TextBlockCollector());
}
@Override
public String toString() {
return String.valueOf(number);
}
@Override
public int hashCode() {
return number;
}
@Override
public boolean equals(Object o) {
return o instanceof Page && o.hashCode() == this.hashCode();
}
public double getArea() {
return height * width;
}
}
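
Because equals and hashCode are based on the page number only, a lightweight "query page" can be built to look up per-page data, as sketched below; the node variable and the page number are illustrative assumptions.

Page query = Page.builder().number(3).build();
boolean onPageThree = node.getPages().contains(query);
Rectangle2D bBoxOnPageThree = node.getBBox().get(query);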

View File

@ -0,0 +1,51 @@
package com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.NodeType;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.NodeVisitor;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock.TextBlock;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.FieldDefaults;
import lombok.experimental.SuperBuilder;
@Data
@SuperBuilder
@AllArgsConstructor
@EqualsAndHashCode(callSuper = true)
@FieldDefaults(level = AccessLevel.PROTECTED)
public class Paragraph extends AbstractSemanticNode {
TextBlock leafTextBlock;
@Override
public NodeType getType() {
return NodeType.PARAGRAPH;
}
@Override
public boolean isLeaf() {
return true;
}
@Override
public void accept(NodeVisitor visitor) {
visitor.visit(this);
}
@Override
public TextBlock getTextBlock() {
return leafTextBlock;
}
}

View File

@ -0,0 +1,53 @@
package com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.NodeType;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.NodeVisitor;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.FieldDefaults;
import lombok.experimental.SuperBuilder;
@Data
@SuperBuilder
@AllArgsConstructor
@FieldDefaults(level = AccessLevel.PRIVATE)
@EqualsAndHashCode(callSuper = true)
public class Section extends AbstractSemanticNode {
@Override
public NodeType getType() {
return NodeType.SECTION;
}
public Headline getHeadline() {
return streamChildrenOfType(NodeType.HEADLINE).map(node -> (Headline) node)
.findFirst().orElseGet(() -> getParent().getHeadline());
}
@Override
public void accept(NodeVisitor visitor) {
visitor.visit(this);
}
public boolean hasTables() {
return streamAllSubNodesOfType(NodeType.TABLE).findAny().isPresent();
}
@Override
public String toString() {
return getTreeId() + ": " + NodeType.SECTION + ": " + this.getTextBlock().buildSummary();
}
}

View File

@ -0,0 +1,507 @@
package com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes;
import static java.lang.String.format;
import java.awt.geom.Rectangle2D;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.LayoutEngine;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.NodeType;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.DocumentTree;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.TextRange;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.entity.EntityType;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.entity.TextEntity;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock.AtomicTextBlock;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock.TextBlock;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock.TextBlockCollector;
import com.knecon.fforesight.service.layoutparser.processor.utils.BBoxMergingUtility;
import com.knecon.fforesight.service.layoutparser.processor.utils.RectangleTransformations;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.NodeVisitor;
public interface SemanticNode {
/**
* Returns the type of this node, such as Section, Paragraph, etc.
*
* @return NodeType of this node
*/
NodeType getType();
/**
* Searches all Nodes located underneath this Node in the DocumentTree and concatenates their AtomicTextBlocks into a single TextBlock.
* So, for a Section all TextBlocks of Subsections, Paragraphs, and Tables are concatenated into a single TextBlock
* If the Node is a Leaf, the LeafTextBlock will be returned instead.
*
* @return TextBlock containing all AtomicTextBlocks that are located under this Node.
*/
default TextBlock getTextBlock() {
return streamAllSubNodes().filter(SemanticNode::isLeaf)
.map(SemanticNode::getTextBlock)
.collect(new TextBlockCollector());
}
/**
* Any Node maintains its own Set of Entities.
* This Set contains all Entities whose boundary intersects the boundary of this node.
*
* @return Set of all Entities associated with this Node
*/
Set<TextEntity> getEntities();
/**
* Each AtomicTextBlock is assigned a page, so to get the pages this node appears on, it collects the PageNodes from each AtomicTextBlock belonging to this node's TextBlock.
*
* @return Set of PageNodes this node appears on.
*/
default Set<Page> getPages() {
return getTextBlock().getPages();
}
default Page getFirstPage() {
return getTextBlock().getPages()
.stream()
.min(Comparator.comparingInt(Page::getNumber)).orElseThrow(() -> new IllegalStateException("SemanticNode has no Page!"));
}
/**
* Each AtomicTextBlock is assigned a page, so to get the pages for this boundary, it collects the PageNodes from each AtomicTextBlock belonging to this node's TextBlock.
*
* @return Set of PageNodes this node appears on.
*/
default Set<Page> getPages(TextRange textRange) {
if (!getBoundary().contains(textRange)) {
throw new IllegalArgumentException(format("%s which was used to query for pages is not contained in the %s of this node!", textRange, getBoundary()));
}
return getTextBlock().getPages(textRange);
}
default boolean isOnPage(int pageNumber) {
return getPages().stream()
.anyMatch(page -> page.getNumber() == pageNumber);
}
/**
* Returns the DocumentTree Object.
*
* @return the DocumentTree of the Document this node belongs to
*/
DocumentTree getDocumentTree();
/**
* The id is a List of Integers uniquely identifying this node in the DocumentTree.
*
* @return the DocumentTree ID
*/
List<Integer> getTreeId();
/**
* This should only be used during graph construction.
*
* @param tocId List of Integers
*/
void setTreeId(List<Integer> tocId);
/**
* Traverses the tree upwards until it hits a Headline, or a Section which then returns the first Headline from its children.
* Throws NotFoundException if no Headline is found this way.
*
* @return First Headline found
*/
default Headline getHeadline() {
return getParent().getHeadline();
}
/**
* Checks if its TocId has a length greater than zero.
*
* @return boolean indicating whether this Node has a Parent in the DocumentTree
*/
default boolean hasParent() {
return getDocumentTree().hasParentById(getTreeId());
}
/**
* @return The SemanticNode representing the parent in the DocumentTree;
* throws NotFoundException when no parent is present
*/
default SemanticNode getParent() {
return getDocumentTree().getParentEntryById(getTreeId()).getNode();
}
/**
* @return The SemanticNode which is directly underneath the Document and under which this node is located.
* If this node is such a top-level node or the Document itself, it returns itself.
*/
default SemanticNode getHighestParent() {
return getDocumentTree().getHighestParentById(getTreeId());
}
/**
* Leaf means a SemanticNode has direct access to a TextBlock; by default this is false and must be overridden.
* Currently Documents, Sections, SuperSections, and Tables are not leaves.
* A TableCell is only a leaf if it has no child nodes.
*
* @return boolean, indicating if a Node has direct access to a TextBlock
*/
default boolean isLeaf() {
return false;
}
/**
* Returns the TextBlock a leaf node has direct access to.
* Only leaf nodes support this operation; non-leaf nodes throw an UnsupportedOperationException.
*
* @return the leaf TextBlock of this node
*/
default TextBlock getLeafTextBlock() {
throw new UnsupportedOperationException("Only leaf Nodes have access to LeafTextBlocks!");
}
/**
* Should only be used during construction of the Graph. Sets the LeafTextBlock of this SemanticNode.
*
* @param textBlock the TextBlock to set as the LeafTextBlock of this SemanticNode
*/
default void setLeafTextBlock(TextBlock textBlock) {
throw new UnsupportedOperationException();
}
/**
* Checks whether this SemanticNode has any Entity with EntityType.ENTITY of the provided type.
*
* @param type string representing the type of entity to check for
* @return true, if this SemanticNode has at least one Entity of the provided type
*/
default boolean hasEntitiesOfType(String type) {
return getEntities().stream()
.filter(entity -> entity.getEntityType().equals(EntityType.ENTITY))
.anyMatch(redactionEntity -> redactionEntity.getType().equals(type));
}
/**
* Returns a List of Entities in this SemanticNode which are of the provided type such as "CBI_author".
*
* @param type string representing the type of entities to return
* @return List of RedactionEntities of the provided type
*/
default List<TextEntity> getEntitiesOfType(String type) {
return getEntities().stream()
.filter(redactionEntity -> redactionEntity.getType().equals(type))
.toList();
}
/**
* Returns a List of Entities in this SemanticNode which have any of the provided types such as "CBI_author".
*
* @param types A list of strings representing the types of entities to return
* @return List of RedactionEntities of any provided type
*/
default List<TextEntity> getEntitiesOfType(List<String> types) {
return getEntities().stream()
.filter(redactionEntity -> redactionEntity.isAnyType(types))
.toList();
}
/**
* Each AtomicTextBlock has an index on its page; this returns the number of the first AtomicTextBlock underneath this node.
* If this node has no AtomicTextBlocks underneath it (e.g. an empty TableCell), it returns -1.
*
* @return Integer representing the number on the page
*/
default Integer getNumberOnPage() {
TextBlock textBlock = getTextBlock();
if (!textBlock.getAtomicTextBlocks().isEmpty()) {
return getTextBlock().getAtomicTextBlocks().get(0).getNumberOnPage();
} else {
return -1;
}
}
/**
* Checks if the SemanticNode contains any text.
*
* @return true, if this node's TextBlock is not empty
*/
default boolean hasText() {
return !getTextBlock().isEmpty();
}
/**
* Checks whether this SemanticNode contains the provided String.
*
* @param string A String which the TextBlock might contain
* @return true, if this node's TextBlock contains the string
*/
default boolean containsString(String string) {
return getTextBlock().getSearchText().contains(string);
}
/**
* Checks whether this SemanticNode contains all the provided Strings.
*
* @param strings A List of Strings which the TextBlock might contain
* @return true, if this node's TextBlock contains all strings
*/
default boolean containsStrings(List<String> strings) {
return strings.stream()
.allMatch(this::containsString);
}
/**
* Checks whether this SemanticNode contains all the provided Strings ignoring case.
*
* @param string A String which the TextBlock might contain
* @return true, if this node's TextBlock contains the string ignoring case
*/
default boolean containsStringIgnoreCase(String string) {
return getTextBlock().getSearchText().toLowerCase(Locale.ROOT).contains(string.toLowerCase(Locale.ROOT));
}
/**
* Checks whether this SemanticNode contains any of the provided Strings.
*
* @param strings A List of Strings which the TextBlock might contain
* @return true, if this node's TextBlock contains any of the strings
*/
default boolean containsAnyString(List<String> strings) {
return strings.stream()
.anyMatch(this::containsString);
}
/**
* Checks whether this SemanticNode contains any of the provided Strings ignoring case.
*
* @param strings A List of Strings which the TextBlock might contain
* @return true, if this node's TextBlock contains any of the strings
*/
default boolean containsAnyStringIgnoreCase(List<String> strings) {
return strings.stream()
.anyMatch(this::containsStringIgnoreCase);
}
/**
* This function is used during insertion of EntityNodes into the graph: it checks whether this node's boundary intersects or even fully contains the RedactionEntity's boundary.
* It sets the entity's fields accordingly and recursively calls this function on all children whose boundary intersects the entity.
*
* @param textEntity RedactionEntity, which is being inserted into the graph
*/
default void addThisToEntityIfIntersects(TextEntity textEntity) {
TextBlock textBlock = getTextBlock();
if (textBlock.getTextRange().intersects(textEntity.getTextRange())) {
if (textBlock.containsBoundary(textEntity.getTextRange())) {
textEntity.setDeepestFullyContainingNode(this);
}
textEntity.addIntersectingNode(this);
streamChildren().filter(semanticNode -> semanticNode.getBoundary().intersects(textEntity.getTextRange()))
.forEach(node -> node.addThisToEntityIfIntersects(textEntity));
}
}
/**
* Returns the set of LayoutEngines associated with this node.
*
* @return set of LayoutEngines
*/
Set<LayoutEngine> getEngines();
/**
* Adds a LayoutEngine to the set.
*/
default void addEngine(LayoutEngine engine) {
getEngines().add(engine);
}
/**
* Streams all children located directly underneath this node in the DocumentTree.
*
* @return Stream of all children
*/
default Stream<SemanticNode> streamChildren() {
return getDocumentTree().childNodes(getTreeId());
}
/**
* Streams all children located directly underneath this node in the DocumentTree of the provided type.
*
* @return Stream of all children
*/
default Stream<SemanticNode> streamChildrenOfType(NodeType nodeType) {
return getDocumentTree().childNodesOfType(getTreeId(), nodeType);
}
/**
* Recursively streams all SemanticNodes located underneath this node in the DocumentTree in order.
*
* @return Stream of all SubNodes
*/
default Stream<SemanticNode> streamAllSubNodes() {
return getDocumentTree().allSubEntriesInOrder(getTreeId())
.map(DocumentTree.Entry::getNode);
}
/**
* Recursively streams all SemanticNodes of the provided type located underneath this node in the DocumentTree in order.
*
* @return Stream of all SubNodes
*/
default Stream<SemanticNode> streamAllSubNodesOfType(NodeType nodeType) {
return getDocumentTree().allSubEntriesInOrder(getTreeId())
.filter(entry -> entry.getType().equals(nodeType))
.map(DocumentTree.Entry::getNode);
}
/**
* The Boundary is the start and end string offsets in the reading order of the document.
*
* @return Boundary of this Node's TextBlock
*/
default TextRange getBoundary() {
return getTextBlock().getTextRange();
}
/**
* If this Node is a Leaf it will calculate the boundingBox of its LeafTextBlock, otherwise it will calculate the Union of the BoundingBoxes of all its Children.
* If called on the Document, it will return the cropbox of each page
*
* @return Rectangle2D fully encapsulating this Node for each page.
*/
default Map<Page, Rectangle2D> getBBox() {
if (isLeaf()) {
return getBBoxFromLeafTextBlock();
}
return getBBoxFromChildren();
}
/**
* Checks whether the Bounding Box of this SemanticNode contains the provided rectangle on the provided page.
*
* @param rectangle2D The rectangle to check if it is contained
* @param pageNumber The Page number on which the rectangle should be checked
* @return boolean
*/
default boolean containsRectangle(Rectangle2D rectangle2D, Integer pageNumber) {
Page helperPage = Page.builder().number(pageNumber).build();
if (!getPages().contains(helperPage)) {
return false;
}
return getBBox().get(helperPage).contains(rectangle2D);
}
/**
* TODO: this produces unwanted results for sections spanning multiple columns.
* Computes the Union of the bounding boxes of all children recursively.
*
* @return The union of the BoundingBoxes of all children
*/
private Map<Page, Rectangle2D> getBBoxFromChildren() {
List<Map<Page, Rectangle2D>> childrenBBoxes = streamChildren().filter(child -> !isFullPageImage(child))
.map(SemanticNode::getBBox)
.toList();
return BBoxMergingUtility.mergeBBoxes(childrenBBoxes);
}
private static boolean isFullPageImage(SemanticNode child) {
if (!child.getType().equals(NodeType.IMAGE)) {
return false;
}
return ((Image) child).isFullPageImage();
}
/**
* @return The union of all BoundingBoxes of the TextBlock of this node
*/
private Map<Page, Rectangle2D> getBBoxFromLeafTextBlock() {
Map<Page, Rectangle2D> bBoxPerPage = new HashMap<>();
Map<Page, List<AtomicTextBlock>> atomicTextBlockPerPage = getTextBlock().getAtomicTextBlocks()
.stream()
.collect(Collectors.groupingBy(AtomicTextBlock::getPage));
atomicTextBlockPerPage.forEach((page, atbs) -> bBoxPerPage.put(page, RectangleTransformations.bBoxUnionAtomicTextBlock(atbs)));
return bBoxPerPage;
}
void accept(NodeVisitor visitor);
}
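
A minimal sketch combining the text and entity helpers of SemanticNode; the document variable, the search strings and the entity type "CBI_author" (the example type used in the Javadoc above) are illustrative assumptions.

List<TextEntity> authors = document.getAllSections().stream()
        .filter(section -> section.containsAnyStringIgnoreCase(List.of("author", "prepared by")))
        .flatMap(section -> section.getEntitiesOfType("CBI_author").stream())
        .toList();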

View File

@ -0,0 +1,47 @@
package com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.NodeType;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.NodeVisitor;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.FieldDefaults;
import lombok.experimental.SuperBuilder;
@Data
@SuperBuilder
@AllArgsConstructor
@FieldDefaults(level = AccessLevel.PRIVATE)
@EqualsAndHashCode(callSuper = true)
public class SuperSection extends AbstractSemanticNode {
@Override
public NodeType getType() {
return NodeType.SUPER_SECTION;
}
public Headline getHeadline() {
return streamChildrenOfType(NodeType.HEADLINE).map(node -> (Headline) node)
.findFirst().orElseGet(() -> getParent().getHeadline());
}
@Override
public void accept(NodeVisitor visitor) {
visitor.visit(this);
}
@Override
public String toString() {
return getTreeId() + ": " + NodeType.SUPER_SECTION + ": " + this.getTextBlock().buildSummary();
}
}

View File

@ -0,0 +1,363 @@
package com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes;
import static java.lang.String.format;
import java.awt.geom.Rectangle2D;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.stream.IntStream;
import java.util.stream.Stream;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.LayoutEngine;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.NodeType;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.DocumentTree;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.NodeVisitor;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.entity.TextEntity;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock.TextBlock;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock.TextBlockCollector;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.FieldDefaults;
@Data
@Builder
@AllArgsConstructor
@FieldDefaults(level = AccessLevel.PRIVATE)
public class Table implements SemanticNode {
@Builder.Default
Set<LayoutEngine> engines = new HashSet<>(Set.of(LayoutEngine.ALGORITHM));
List<Integer> treeId;
DocumentTree documentTree;
int numberOfRows;
int numberOfCols;
TextBlock textBlock;
@Builder.Default
@EqualsAndHashCode.Exclude
Set<TextEntity> entities = new HashSet<>();
@EqualsAndHashCode.Exclude
Map<Page, Rectangle2D> bBoxCache;
/**
* Streams all entities in this table that appear in a row which contains all of the provided strings, ignoring case.
*
* @param strings Strings to check whether a row contains them
* @return Stream of all entities in this table that appear in a row which contains all of the provided strings
*/
public Stream<TextEntity> streamEntitiesWhereRowContainsStringsIgnoreCase(List<String> strings) {
return IntStream.range(0, numberOfRows).boxed()
.filter(row -> rowContainsStringsIgnoreCase(row, strings))
.flatMap(this::streamRow)
.map(TableCell::getEntities)
.flatMap(Collection::stream);
}
/**
* Checks whether the specified row contains all the provided strings.
*
* @param row the row to check as an Integer, must be smaller than numberOfRows
* @param strings a list of strings to check for
* @return true, if all strings appear in the provided row
*/
public boolean rowContainsStringsIgnoreCase(Integer row, List<String> strings) {
String rowText = streamRow(row).map(TableCell::getTextBlock)
.collect(new TextBlockCollector()).getSearchText().toLowerCase(Locale.ROOT);
return strings.stream()
.map(string -> string.toLowerCase(Locale.ROOT))
.allMatch(rowText::contains);
}
/**
* Streams all entities which appear in a row where at least one cell has the provided header and the provided value.
*
* @param header the header value to search for
* @param value the string which the table cell should contain
* @return a stream of all entities, which appear in a row where at least one cell has the provided header and the provided value.
*/
public Stream<TextEntity> streamEntitiesWhereRowHasHeaderAndValue(String header, String value) {
List<Integer> colsWithHeader = streamHeaders().filter(headerNode -> headerNode.containsString(header))
.map(TableCell::getCol)
.toList();
return streamTableCells().filter(tableCellNode -> colsWithHeader.stream()
.anyMatch(colWithHeader -> getCell(tableCellNode.getRow(), colWithHeader).containsString(value)))
.map(TableCell::getEntities)
.flatMap(Collection::stream);
}
/**
* Streams all entities which appear in a row where at least one cell has the provided header and any provided value.
*
* @param header the header value to search for
* @param values the strings which the table cell should contain
* @return a stream of all entities, which appear in a row where at least one cell has the provided header and any provided value.
*/
public Stream<TextEntity> streamEntitiesWhereRowHasHeaderAndAnyValue(String header, List<String> values) {
List<Integer> colsWithHeader = streamHeaders().filter(headerNode -> headerNode.containsString(header))
.map(TableCell::getCol)
.toList();
return streamTableCells().filter(tableCellNode -> colsWithHeader.stream()
.anyMatch(colWithHeader -> getCell(tableCellNode.getRow(), colWithHeader).containsAnyString(values)))
.map(TableCell::getEntities)
.flatMap(Collection::stream);
}
/**
* Streams all entities in this table, that appear in a row, which contains at least one entity with any of the provided types.
*
* @param types type strings to check whether a row contains an entity like them
* @return Stream of all entities in this table, that appear in a row, which contains at least one entity with any of the provided types.
*/
public Stream<TextEntity> streamEntitiesWhereRowContainsEntitiesOfType(List<String> types) {
List<Integer> rowsWithEntityOfType = IntStream.range(0, numberOfRows).boxed()
.filter(rowNumber -> streamEntityTypesInRow(rowNumber).anyMatch(existingType -> types.stream()
.anyMatch(typeToCheck -> typeToCheck.equals(existingType))))
.toList();
return rowsWithEntityOfType.stream()
.flatMap(this::streamRow)
.map(TableCell::getEntities)
.flatMap(Collection::stream);
}
/**
* Streams all entities in this table, that appear in a row, which does not contain any entity with any of the provided types.
*
* @param types type strings to check whether a row doesn't contain an entity like it
* @return Stream of all entities in this table, that appear in a row, which does not contain any entity with any of the provided types.
*/
public Stream<TextEntity> streamEntitiesWhereRowContainsNoEntitiesOfType(List<String> types) {
List<Integer> rowsWithNoEntityOfType = IntStream.range(0, numberOfRows).boxed()
.filter(rowNumber -> streamEntityTypesInRow(rowNumber).noneMatch(existingType -> types.stream()
.anyMatch(typeToCheck -> typeToCheck.equals(existingType))))
.toList();
return rowsWithNoEntityOfType.stream()
.flatMap(this::streamRow)
.map(TableCell::getEntities)
.flatMap(Collection::stream);
}
private Stream<String> streamEntityTypesInRow(Integer rowNumber) {
return streamRow(rowNumber).map(TableCell::getEntities)
.flatMap(Collection::stream)
.map(TextEntity::getType)
.distinct();
}
/**
* Returns a TableCell at the provided row and column location.
*
* @param row int representing the row, must be smaller than numberOfRows
* @param col int representing the col, must be smaller than numberOfCols
* @return TableCell at the provided location in the table
*/
public TableCell getCell(int row, int col) {
if (row < 0 || row >= numberOfRows || col < 0 || col >= numberOfCols) {
throw new IllegalArgumentException(format("row %d, col %d is out of bounds for number of rows of %d and number of cols %d", row, col, numberOfRows, numberOfCols));
}
int idx = row * numberOfCols + col;
return (TableCell) documentTree.getEntryById(treeId).getChildren()
.get(idx).getNode();
}
/**
* Streams all TableCells in this Table row-wise.
*
* @return Stream of all TableCells
*/
public Stream<TableCell> streamTableCells() {
return streamChildrenOfType(NodeType.TABLE_CELL).map(node -> (TableCell) node);
}
/**
* Streams all TableCells in this Table which have the provided header row-wise.
*
* @return Stream of all TableCells which have the provided header
*/
public Stream<TableCell> streamTableCellsWithHeader(String header) {
return streamHeaders().filter(tableCellNode -> tableCellNode.getTextBlock().getSearchText().contains(header))
.map(TableCell::getCol)
.flatMap(this::streamCol)
.filter(tableCellNode -> !tableCellNode.isHeader());
}
/**
* Streams all TableCells belonging to the provided column from top down.
*
* @param col int representing the column
* @return Stream of all TableCell in the provided column
*/
public Stream<TableCell> streamCol(int col) {
return IntStream.range(0, numberOfRows).boxed()
.map(row -> getCell(row, col));
}
/**
* Streams all TableCells belonging to the provided row from left to right.
*
* @param row int representing the row
* @return Stream of all TableCell in the provided row
*/
public Stream<TableCell> streamRow(int row) {
return IntStream.range(0, numberOfCols).boxed()
.map(col -> getCell(row, col));
}
/**
* Streams all TableCells row-wise and filters them with header == true.
*
* @return Stream of all TableCells with header == true
*/
public Stream<TableCell> streamHeaders() {
return streamTableCells().filter(TableCell::isHeader);
}
/**
* Streams all TableCells of the provided row and column and filters them with header == true.
*
* @param row int representing the row
* @param col int representing the column
* @return Stream of all TableCells with header == true in the provided row or col
*/
public Stream<TableCell> streamHeadersForCell(int row, int col) {
return Stream.concat(streamRow(row), streamCol(col))
.filter(TableCell::isHeader);
}
/**
* Streams all Headers and checks if any equal the provided string.
*
* @param header string to check the headers for
* @return true, if at least one header equals the provided string
*/
public boolean hasHeader(String header) {
return streamHeaders().anyMatch(tableCellNode -> tableCellNode.getTextBlock().getSearchText().strip().equals(header));
}
/**
* Checks if this table has a column with the provided header and any of the table cells in that column contain the provided value.
*
* @param header string to find header cells
* @param value string to check cells with provided header
* @return true, if this table has a column with the provided header and any of the table cells in that column contain the provided value
*/
public boolean hasRowWithHeaderAndValue(String header, String value) {
return streamTableCellsWithHeader(header).anyMatch(tableCellNode -> tableCellNode.containsString(value));
}
/**
* Checks if this table has a column with the provided header and any of the table cells in that column contains any of the provided values.
*
* @param header string to find header cells
* @param values List of strings to check cells with provided header
* @return true, if this table has a column with the provided header and any of the table cells in that column contains any of the provided values.
*/
public boolean hasRowWithHeaderAndAnyValue(String header, List<String> values) {
return streamTableCellsWithHeader(header).anyMatch(tableCellNode -> tableCellNode.containsAnyString(values));
}
/**
* Finds all entities of the provided type, which appear in the same row that the provided entity appears in.
*
* @param type the type of entities to search for
* @param textEntity the entity, which appears in the row to search
* @return List of all entities of the provided type, which appear in the same row that the provided entity appears in.
*/
public List<TextEntity> getEntitiesOfTypeInSameRow(String type, TextEntity textEntity) {
return textEntity.getIntersectingNodes()
.stream()
.filter(node -> node instanceof TableCell)
.map(node -> (TableCell) node)
.flatMap(tableCellNode -> streamRow(tableCellNode.getRow()))
.map(cell -> cell.getEntitiesOfType(type))
.flatMap(Collection::stream)
.toList();
}
@Override
public NodeType getType() {
return NodeType.TABLE;
}
@Override
public TextBlock getTextBlock() {
if (textBlock == null) {
textBlock = SemanticNode.super.getTextBlock();
}
return textBlock;
}
@Override
public String toString() {
return treeId.toString() + ": " + NodeType.TABLE + ": #cols: " + numberOfCols + ", #rows: " + numberOfRows + ", " + this.getTextBlock().buildSummary();
}
@Override
public Map<Page, Rectangle2D> getBBox() {
if (bBoxCache == null) {
bBoxCache = SemanticNode.super.getBBox();
}
return bBoxCache;
}
@Override
public void accept(NodeVisitor visitor) {
visitor.visit(this);
}
}
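
A minimal sketch of the row/header queries above; the table variable and the header and value strings are illustrative assumptions.

if (table.hasHeader("Study type")) {
    List<TextEntity> hits = table.streamEntitiesWhereRowHasHeaderAndValue("Study type", "in vivo")
            .toList();
    // 'hits' now holds every entity from rows whose "Study type" cell contains "in vivo"
}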

View File

@ -0,0 +1,95 @@
package com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes;
import java.awt.geom.Rectangle2D;
import java.util.HashMap;
import java.util.Map;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.NodeType;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.NodeVisitor;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock.TextBlock;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock.TextBlockCollector;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.FieldDefaults;
import lombok.experimental.SuperBuilder;
@Data
@SuperBuilder
@AllArgsConstructor
@EqualsAndHashCode(callSuper = true)
@FieldDefaults(level = AccessLevel.PRIVATE)
public class TableCell extends AbstractSemanticNode {
int row;
int col;
boolean header;
Rectangle2D bBox;
TextBlock leafTextBlock;
TextBlock textBlock;
@Override
public Map<Page, Rectangle2D> getBBox() {
Map<Page, Rectangle2D> bBoxPerPage = new HashMap<>();
getPages().forEach(page -> bBoxPerPage.put(page, bBox));
return bBoxPerPage;
}
@Override
public void accept(NodeVisitor visitor) {
visitor.visit(this);
}
@Override
public NodeType getType() {
return NodeType.TABLE_CELL;
}
@Override
public boolean isLeaf() {
return getDocumentTree().getEntryById(getTreeId()).getChildren().isEmpty();
}
@Override
public TextBlock getTextBlock() {
if (isLeaf()) {
return leafTextBlock;
}
if (textBlock == null) {
textBlock = buildTextBlock();
}
return textBlock;
}
private TextBlock buildTextBlock() {
return streamAllSubNodes().filter(SemanticNode::isLeaf)
.map(SemanticNode::getLeafTextBlock)
.collect(new TextBlockCollector());
}
@Override
public String toString() {
return getTreeId() + ": " + NodeType.TABLE_CELL + ": " + this.getTextBlock().buildSummary();
}
}

View File

@ -0,0 +1,275 @@
package com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock;
import static java.lang.String.format;
import java.awt.geom.Rectangle2D;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.DocumentPositionData;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.DocumentTextData;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.TextRange;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Page;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.SemanticNode;
import com.knecon.fforesight.service.layoutparser.processor.utils.RectangleTransformations;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.FieldDefaults;
@Data
@Builder
@AllArgsConstructor
@FieldDefaults(level = AccessLevel.PRIVATE)
public class AtomicTextBlock implements TextBlock {
Long id;
Integer numberOnPage;
Page page;
//string coordinates
TextRange textRange;
String searchText;
@Builder.Default
List<Integer> lineBreaks = new ArrayList<>();
@Builder.Default
List<TextRange> boldTextBoundaries = new ArrayList<>();
@Builder.Default
List<TextRange> italicTextBoundaries = new ArrayList<>();
String orientation;
int textDirection;
//position coordinates
@Builder.Default
List<Integer> stringIdxToPositionIdx = new ArrayList<>();
@Builder.Default
List<Rectangle2D> positions = new ArrayList<>();
@EqualsAndHashCode.Exclude
SemanticNode parent;
@Override
public int numberOfLines() {
return lineBreaks.size() + 1;
}
@Override
public String subSequenceWithLineBreaks(TextRange stringTextRange) {
if (stringTextRange.length() == 0 || !getTextRange().contains(stringTextRange)) {
return "";
}
Set<Integer> lbInBoundary = lineBreaks.stream()
.map(i -> i + this.textRange.start())
.filter(stringTextRange::contains)
.collect(Collectors.toSet());
if (stringTextRange.end() == getTextRange().end()) {
lbInBoundary.add(getTextRange().end());
}
StringBuilder sb = new StringBuilder();
for (int i = stringTextRange.start(); i < stringTextRange.end(); i++) {
char character = this.charAt(i);
if (lbInBoundary.contains(i + 1)) {
// always plus one, due to the linebreaks being an exclusive end index
if (!Character.isWhitespace(character)) {
lbInBoundary.remove(i + 1);
lbInBoundary.add(i + 2);
sb.append(character);
continue;
}
sb.append("\n");
} else {
sb.append(character);
}
}
return sb.toString();
}
public static AtomicTextBlock fromSearchTextWithTextPosition(String searchText,
List<Integer> lineBreaks,
List<TextRange> boldTextBoundaries,
List<TextRange> italicTextBoundaries,
List<Rectangle2D> positions,
List<Integer> stringIdxToPositionIdx,
long idx,
SemanticNode parent,
int numberOnPage,
Page page,
int offset,
String orientation,
int textDirection) {
return AtomicTextBlock.builder()
.id(idx)
.parent(parent)
.searchText(searchText)
.numberOnPage(numberOnPage)
.page(page)
.lineBreaks(lineBreaks)
.boldTextBoundaries(boldTextBoundaries)
.italicTextBoundaries(italicTextBoundaries)
.positions(positions)
.stringIdxToPositionIdx(stringIdxToPositionIdx)
.textRange(new TextRange(offset, offset + searchText.length()))
.textDirection(textDirection)
.orientation(orientation)
.build();
}
public static AtomicTextBlock empty(Long textBlockIdx, int stringOffset, Page page, int numberOnPage, SemanticNode parent) {
return AtomicTextBlock.builder()
.id(textBlockIdx)
.textRange(new TextRange(stringOffset, stringOffset))
.searchText("")
.page(page)
.numberOnPage(numberOnPage)
.parent(parent)
.build();
}
public static AtomicTextBlock fromAtomicTextBlockData(DocumentTextData documentTextData, DocumentPositionData documentPositionData, SemanticNode parent, Page page) {
return AtomicTextBlock.builder()
.id(documentTextData.getId())
.numberOnPage(documentTextData.getNumberOnPage())
.page(page)
.textRange(new TextRange(documentTextData.getStart(), documentTextData.getEnd()))
.searchText(documentTextData.getSearchText())
.lineBreaks(Arrays.stream(documentTextData.getLineBreaks()).boxed()
.toList())
.stringIdxToPositionIdx(Arrays.stream(documentPositionData.getStringIdxToPositionIdx()).boxed()
.toList())
.positions(toRectangle2DList(documentPositionData.getPositions()))
.parent(parent)
.build();
}
private static List<Rectangle2D> toRectangle2DList(float[][] positions) {
return Arrays.stream(positions)
.map(floatArr -> (Rectangle2D) new Rectangle2D.Float(floatArr[0], floatArr[1], floatArr[2], floatArr[3]))
.toList();
}
public CharSequence getLine(int lineNumber) {
if (lineNumber >= numberOfLines() || lineNumber < 0) {
throw new IndexOutOfBoundsException(format("line %d out of range for AtomicTextBlock with %d lines", lineNumber, numberOfLines()));
}
if (lineNumber == 0) {
if (lineBreaks.isEmpty()) {
return searchText;
}
return subSequence(textRange.start(), lineBreaks.get(0) + textRange.start());
} else if (lineNumber == numberOfLines() - 1) {
return subSequence(lineBreaks.get(lineBreaks.size() - 1) + textRange.start(), textRange.end());
}
return subSequence(lineBreaks.get(lineNumber - 1) + textRange.start(), lineBreaks.get(lineNumber) + textRange.start());
}
@Override
public List<AtomicTextBlock> getAtomicTextBlocks() {
return List.of(this);
}
@Override
public int getNextLinebreak(int fromIndex) {
return lineBreaks.stream()//
.filter(linebreak -> linebreak > fromIndex - textRange.start()) //
.findFirst() //
.orElse(searchText.length()) + textRange.start();
}
@Override
public int getPreviousLinebreak(int fromIndex) {
return lineBreaks.stream()//
.filter(linebreak -> linebreak <= fromIndex - textRange.start())//
.reduce((a, b) -> b)//
.orElse(0) + textRange.start();
}
@Override
public Rectangle2D getPosition(int stringIdx) {
return positions.get(stringIdxToPositionIdx.get(stringIdx - textRange.start()));
}
@Override
public List<Rectangle2D> getPositions(TextRange stringTextRange) {
if (!containsBoundary(stringTextRange)) {
throw new IndexOutOfBoundsException(format("%s is out of bounds for %s", stringTextRange, this.textRange));
}
if (stringTextRange.length() == 0) {
return Collections.emptyList();
}
int startPositionIdx = stringIdxToPositionIdx.get(stringTextRange.start() - this.textRange.start());
if (stringTextRange.end() == this.textRange.end()) {
return positions.subList(startPositionIdx, positions.size());
}
return positions.subList(startPositionIdx, stringIdxToPositionIdx.get(stringTextRange.end() - this.textRange.start()));
}
public Map<Page, List<Rectangle2D>> getPositionsPerPage(TextRange stringTextRange) {
List<Rectangle2D> rectanglesPerLine = stringTextRange.split(getAllLineBreaksInBoundary(stringTextRange))
.stream()
.map(this::getPositions)
.map(RectangleTransformations::rectangleBBoxWithGaps)
.flatMap(Collection::stream)
.toList();
Map<Page, List<Rectangle2D>> rectanglePerLinePerPage = new HashMap<>();
rectanglePerLinePerPage.put(page, rectanglesPerLine);
return rectanglePerLinePerPage;
}
protected List<Integer> getAllLineBreaksInBoundary(TextRange textRange) {
return getLineBreaks().stream()
.map(linebreak -> linebreak + this.textRange.start())
.filter(textRange::contains)
.toList();
}
@Override
public String toString() {
return searchText;
}
}
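
A minimal sketch of reading an AtomicTextBlock line by line and resolving a character position; the block variable is assumed to be a non-empty AtomicTextBlock from a parsed page.

for (int line = 0; line < block.numberOfLines(); line++) {
    CharSequence lineText = block.getLine(line); // text of one layout line
}
// bounding rectangle of the block's first character
Rectangle2D firstGlyph = block.getPosition(block.getTextRange().start());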

View File

@ -0,0 +1,271 @@
package com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock;
import static java.lang.String.format;
import java.awt.geom.Rectangle2D;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.stream.Stream;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.TextRange;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Page;
import lombok.AccessLevel;
import lombok.Data;
import lombok.experimental.FieldDefaults;
@Data
@FieldDefaults(level = AccessLevel.PRIVATE)
public class ConcatenatedTextBlock implements TextBlock {
List<AtomicTextBlock> atomicTextBlocks;
String searchText;
TextRange textRange;
public static ConcatenatedTextBlock empty() {
return new ConcatenatedTextBlock(Collections.emptyList());
}
public ConcatenatedTextBlock(List<AtomicTextBlock> atomicTextBlocks) {
this.atomicTextBlocks = new LinkedList<>();
if (atomicTextBlocks.isEmpty()) {
textRange = new TextRange(-1, -1);
return;
}
var firstTextBlock = atomicTextBlocks.get(0);
this.atomicTextBlocks.add(firstTextBlock);
textRange = new TextRange(firstTextBlock.getTextRange().start(), firstTextBlock.getTextRange().end());
atomicTextBlocks.subList(1, atomicTextBlocks.size())
.forEach(this::concat);
}
public ConcatenatedTextBlock concat(TextBlock textBlock) {
int start = textBlock.getTextRange().start();
int end = textBlock.getTextRange().end();
if (this.atomicTextBlocks.isEmpty()) {
textRange.setStart(start);
textRange.setEnd(end);
} else if (textRange.end() != start) {
throw new UnsupportedOperationException(format("Can only concat consecutive TextBlocks, trying to concat %s and %s", textRange, textBlock.getTextRange()));
}
this.atomicTextBlocks.addAll(textBlock.getAtomicTextBlocks());
textRange.setEnd(end);
this.searchText = null;
return this;
}
private AtomicTextBlock getAtomicTextBlockByStringIndex(int stringIdx) {
return atomicTextBlocks.stream()
.filter(textBlock -> textBlock.getTextRange().containsExclusive(stringIdx))
.findAny()
.orElseThrow(IndexOutOfBoundsException::new);
}
private List<AtomicTextBlock> getAllAtomicTextBlocksPartiallyInStringBoundary(TextRange textRange) {
return atomicTextBlocks.stream()
.filter(tb -> tb.getTextRange().intersects(textRange))
.toList();
}
@Override
public String getSearchText() {
if (searchText == null) {
StringBuilder sb = new StringBuilder();
getAtomicTextBlocks().forEach(atb -> sb.append(atb.getSearchText()));
searchText = sb.toString();
}
return searchText;
}
@Override
public int numberOfLines() {
return atomicTextBlocks.stream()
.map(AtomicTextBlock::getLineBreaks)
.mapToInt(List::size).sum();
}
@Override
public int getNextLinebreak(int fromIndex) {
return getAtomicTextBlockByStringIndex(fromIndex).getNextLinebreak(fromIndex);
}
@Override
public int getPreviousLinebreak(int fromIndex) {
return getAtomicTextBlockByStringIndex(fromIndex).getPreviousLinebreak(fromIndex);
}
@Override
public List<Integer> getLineBreaks() {
return getAtomicTextBlocks().stream()
.flatMap(atomicTextBlock -> atomicTextBlock.getLineBreaks()
.stream())
.toList();
}
@Override
public Rectangle2D getPosition(int stringIdx) {
return getAtomicTextBlockByStringIndex(stringIdx).getPosition(stringIdx);
}
@Override
public List<Rectangle2D> getPositions(TextRange stringTextRange) {
List<AtomicTextBlock> textBlocks = getAllAtomicTextBlocksPartiallyInStringBoundary(stringTextRange);
if (textBlocks.size() == 1) {
return textBlocks.get(0).getPositions(stringTextRange);
}
AtomicTextBlock firstTextBlock = textBlocks.get(0);
List<Rectangle2D> positions = new LinkedList<>(firstTextBlock.getPositions(new TextRange(stringTextRange.start(), firstTextBlock.getTextRange().end())));
for (AtomicTextBlock textBlock : textBlocks.subList(1, textBlocks.size() - 1)) {
positions.addAll(textBlock.getPositions());
}
var lastTextBlock = textBlocks.get(textBlocks.size() - 1);
positions.addAll(lastTextBlock.getPositions(new TextRange(lastTextBlock.getTextRange().start(), stringTextRange.end())));
return positions;
}
@Override
public Map<Page, List<Rectangle2D>> getPositionsPerPage(TextRange stringTextRange) {
List<AtomicTextBlock> textBlocks = getAllAtomicTextBlocksPartiallyInStringBoundary(stringTextRange);
if (textBlocks.size() == 1) {
return textBlocks.get(0).getPositionsPerPage(stringTextRange);
}
AtomicTextBlock firstTextBlock = textBlocks.get(0);
Map<Page, List<Rectangle2D>> rectanglesPerLinePerPage = firstTextBlock.getPositionsPerPage(new TextRange(stringTextRange.start(), firstTextBlock.getTextRange().end()));
for (AtomicTextBlock textBlock : textBlocks.subList(1, textBlocks.size() - 1)) {
rectanglesPerLinePerPage = mergeEntityPositionsWithSamePageNode(rectanglesPerLinePerPage, textBlock.getPositionsPerPage(textBlock.getTextRange()));
}
AtomicTextBlock lastTextBlock = textBlocks.get(textBlocks.size() - 1);
rectanglesPerLinePerPage = mergeEntityPositionsWithSamePageNode(rectanglesPerLinePerPage,
lastTextBlock.getPositionsPerPage(new TextRange(lastTextBlock.getTextRange().start(),
stringTextRange.end())));
return rectanglesPerLinePerPage;
}
private Map<Page, List<Rectangle2D>> mergeEntityPositionsWithSamePageNode(Map<Page, List<Rectangle2D>> map1, Map<Page, List<Rectangle2D>> map2) {
Map<Page, List<Rectangle2D>> mergedMap = new HashMap<>(map1);
map2.forEach((pageNode, rectangles) -> mergedMap.merge(pageNode,
rectangles,
(l1, l2) -> Stream.concat(l1.stream(), l2.stream())
.toList()));
return mergedMap;
}
@Override
public String subSequenceWithLineBreaks(TextRange stringTextRange) {
if (stringTextRange.length() == 0 || !getTextRange().contains(stringTextRange)) {
return "";
}
List<AtomicTextBlock> textBlocks = getAllAtomicTextBlocksPartiallyInStringBoundary(stringTextRange);
if (textBlocks.size() == 1) {
return textBlocks.get(0).subSequenceWithLineBreaks(stringTextRange);
}
StringBuilder sb = new StringBuilder();
AtomicTextBlock firstTextBlock = textBlocks.get(0);
sb.append(firstTextBlock.subSequenceWithLineBreaks(new TextRange(stringTextRange.start(), firstTextBlock.getTextRange().end())));
for (AtomicTextBlock textBlock : textBlocks.subList(1, textBlocks.size() - 1)) {
sb.append(textBlock.searchTextWithLineBreaks());
}
var lastTextBlock = textBlocks.get(textBlocks.size() - 1);
sb.append(lastTextBlock.subSequenceWithLineBreaks(new TextRange(lastTextBlock.getTextRange().start(), stringTextRange.end())));
return sb.toString();
}
@Override
public String toString() {
return getSearchText();
}
@Override
public List<TextRange> getBoldTextBoundaries() {
return getAtomicTextBlocks().stream()
.map(AtomicTextBlock::getBoldTextBoundaries)
.flatMap(Collection::stream)
.toList();
}
@Override
public List<TextRange> getItalicTextBoundaries() {
return getAtomicTextBlocks().stream()
.map(AtomicTextBlock::getItalicTextBoundaries)
.flatMap(Collection::stream)
.toList();
}
@Override
public String getOrientation() {
if (atomicTextBlocks.isEmpty()) {
return "";
}
return atomicTextBlocks.get(0).getOrientation();
}
@Override
public int getTextDirection() {
if (atomicTextBlocks.isEmpty()) {
return 0;
}
return atomicTextBlocks.get(0).getTextDirection();
}
}
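
Below is a minimal usage sketch, not part of the changeset, showing how the concatenation contract above could be exercised: blocks have to be consecutive (each block's range starts where the previous one ends) or concat throws. The joinParagraph helper and the paragraphBlocks argument are hypothetical; ConcatenatedTextBlock.empty(), concat(...) and the TextBlock accessors are taken from the listings, and the import path assumes ConcatenatedTextBlock sits in the same textblock package as the interface in the next file.

import java.util.List;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.TextRange;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock.ConcatenatedTextBlock;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock.TextBlock;
public class ConcatenationSketch {
    public static TextBlock joinParagraph(List<TextBlock> paragraphBlocks) {
        ConcatenatedTextBlock joined = ConcatenatedTextBlock.empty();
        for (TextBlock block : paragraphBlocks) {
            // Throws UnsupportedOperationException if the ranges are not consecutive.
            joined.concat(block);
        }
        TextRange range = joined.getTextRange();
        System.out.printf("Joined %d characters: %s%n", range.length(), joined.buildSummary());
        return joined;
    }
}

Note that each concat clears the cached searchText, so getSearchText() is rebuilt lazily on the next call.
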

View File

@ -0,0 +1,158 @@
package com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock;
import static java.lang.String.format;
import java.awt.geom.Rectangle2D;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.TextRange;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Page;
public interface TextBlock extends CharSequence {
String getSearchText();
List<AtomicTextBlock> getAtomicTextBlocks();
List<TextRange> getBoldTextBoundaries();
List<TextRange> getItalicTextBoundaries();
String getOrientation();
int getTextDirection();
TextRange getTextRange();
int getNextLinebreak(int fromIndex);
int getPreviousLinebreak(int fromIndex);
List<Integer> getLineBreaks();
Rectangle2D getPosition(int stringIdx);
List<Rectangle2D> getPositions(TextRange stringTextRange);
Map<Page, List<Rectangle2D>> getPositionsPerPage(TextRange stringTextRange);
int numberOfLines();
String subSequenceWithLineBreaks(TextRange stringTextRange);
default String searchTextWithLineBreaks() {
return subSequenceWithLineBreaks(getTextRange());
}
default int indexOf(String searchTerm) {
return indexOf(searchTerm, getTextRange().start());
}
default Set<Page> getPages() {
return getAtomicTextBlocks().stream()
.map(AtomicTextBlock::getPage)
.collect(Collectors.toUnmodifiableSet());
}
default Set<Page> getPages(TextRange textRange) {
return getAtomicTextBlocks().stream()
.filter(atomicTextBlock -> atomicTextBlock.getTextRange().intersects(textRange))
.map(AtomicTextBlock::getPage)
.collect(Collectors.toUnmodifiableSet());
}
default int indexOf(String searchTerm, int startOffset) {
int start = getSearchText().indexOf(searchTerm, startOffset - getTextRange().start());
if (start == -1) {
return -1;
}
return start + getTextRange().start();
}
default CharSequence getFirstLine() {
return subSequence(getTextRange().start(), getNextLinebreak(getTextRange().start()));
}
default boolean containsBoundary(TextRange textRange) {
if (textRange.end() < textRange.start()) {
throw new IllegalArgumentException(format("Invalid %s, StartIndex must be smaller than EndIndex", textRange));
}
return getTextRange().contains(textRange);
}
default boolean containsIndex(int stringIndex) {
return getTextRange().containsExclusive(stringIndex);
}
default CharSequence subSequence(TextRange textRange) {
return subSequence(textRange.start(), textRange.end());
}
default String buildSummary() {
String[] words = getSearchText().split(" ");
int bound = Math.min(words.length, 4);
List<String> list = new ArrayList<>(Arrays.asList(words).subList(0, bound));
return String.join(" ", list);
}
@Override
default CharSequence subSequence(int start, int end) {
return getSearchText().substring(start - getTextRange().start(), end - getTextRange().start());
}
@Override
default int length() {
return getTextRange().length();
}
@Override
default char charAt(int index) {
return getSearchText().charAt(index - getTextRange().start());
}
}
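
The default methods above imply a single offset convention: every public index is document-global, and getSearchText() is addressed by subtracting getTextRange().start(). A small sketch, not part of the changeset, illustrating that convention; the highlightTerm helper and the block argument are hypothetical, and only methods declared in the interface are called.

import com.knecon.fforesight.service.layoutparser.processor.model.graph.TextRange;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock.TextBlock;
public class OffsetSketch {
    public static void highlightTerm(TextBlock block, String term) {
        int start = block.indexOf(term); // document-global offset, or -1 if absent
        if (start == -1) {
            return;
        }
        TextRange hit = new TextRange(start, start + term.length());
        CharSequence text = block.subSequence(hit); // accepts the same global offsets
        block.getPositionsPerPage(hit)
             .forEach((page, rectangles) -> System.out.printf("'%s' on %s: %s%n", text, page, rectangles));
    }
}

ConcatenatedTextBlock above implements getPositions and getPositionsPerPage by splitting such a range across its atomic blocks and merging the per-page results.
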

View File

@ -0,0 +1,49 @@
package com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock;
import java.util.Set;
import java.util.function.BiConsumer;
import java.util.function.BinaryOperator;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collector;
import lombok.NoArgsConstructor;
@NoArgsConstructor
public class TextBlockCollector implements Collector<TextBlock, ConcatenatedTextBlock, TextBlock> {
@Override
public Supplier<ConcatenatedTextBlock> supplier() {
return ConcatenatedTextBlock::empty;
}
@Override
public BiConsumer<ConcatenatedTextBlock, TextBlock> accumulator() {
return ConcatenatedTextBlock::concat;
}
@Override
public BinaryOperator<ConcatenatedTextBlock> combiner() {
return ConcatenatedTextBlock::concat;
}
@Override
public Function<ConcatenatedTextBlock, TextBlock> finisher() {
return a -> a;
}
@Override
public Set<Characteristics> characteristics() {
return Set.of(Characteristics.IDENTITY_FINISH, Characteristics.CONCURRENT);
}
}
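
A short usage sketch, not part of the changeset: collapsing a stream of text blocks with the collector above. The lineBlocks list is hypothetical and assumed to be in reading order with consecutive ranges, since the accumulator delegates to ConcatenatedTextBlock.concat.

import java.util.List;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock.TextBlock;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.textblock.TextBlockCollector;
public class CollectorSketch {
    public static TextBlock collapse(List<? extends TextBlock> lineBlocks) {
        return lineBlocks.stream() // a sequential stream preserves encounter order
                         .collect(new TextBlockCollector());
    }
}

Because the finisher is the identity function (IDENTITY_FINISH), the collected value is the mutable ConcatenatedTextBlock itself, exposed through the TextBlock interface.
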

View File

@ -2,7 +2,7 @@ package com.knecon.fforesight.service.layoutparser.processor.model.image;
import java.awt.geom.Rectangle2D; import java.awt.geom.Rectangle2D;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.ImageType; import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.ImageType;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;
import lombok.Data; import lombok.Data;
@ -22,10 +22,11 @@ public class ClassifiedImage {
private boolean isAppendedToSection; private boolean isAppendedToSection;
private boolean hasTransparency; private boolean hasTransparency;
private int page; private int page;
@NonNull
private String representation; private String representation;
public ClassifiedImage(@NonNull Rectangle2D position, @NonNull ImageType imageType, boolean hasTransparency, int page, String representation) { public ClassifiedImage(@NonNull Rectangle2D position, @NonNull ImageType imageType, boolean hasTransparency, int page, @NonNull String representation) {
this.position = position; this.position = position;
this.imageType = imageType; this.imageType = imageType;

View File

@ -1,6 +1,5 @@
package com.knecon.fforesight.service.layoutparser.processor.model.outline; package com.knecon.fforesight.service.layoutparser.processor.model.outline;
import java.awt.geom.AffineTransform;
import java.awt.geom.Point2D; import java.awt.geom.Point2D;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
@ -27,9 +26,6 @@ import org.apache.pdfbox.pdmodel.interactive.documentnavigation.outline.PDDocume
import org.apache.pdfbox.pdmodel.interactive.documentnavigation.outline.PDOutlineItem; import org.apache.pdfbox.pdmodel.interactive.documentnavigation.outline.PDOutlineItem;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import com.knecon.fforesight.service.layoutparser.processor.utils.CoordinateTransforms;
import com.knecon.fforesight.service.layoutparser.processor.utils.PageInformation;
import lombok.SneakyThrows; import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
@ -93,13 +89,12 @@ public class OutlineExtractorService {
if (page == null) { if (page == null) {
return Optional.empty(); return Optional.empty();
} }
} catch (IOException e) { }catch (IOException e){
log.info(String.format("Error occurred during position resolution for outline item with title %s: " + e, title)); log.info(String.format("Error occurred during position resolution for outline item with title %s: " + e, title));
return Optional.empty(); return Optional.empty();
} }
int pageNumber = document.getPages().indexOf(page) + 1; int pageNumber = document.getPages().indexOf(page);
AffineTransform userSpaceToPageCoords = CoordinateTransforms.calculateInitialUserSpaceCoordsToPageCoords(PageInformation.fromPDPage(pageNumber, page));
Optional<Point2D> outlinePosition = Optional.empty(); Optional<Point2D> outlinePosition = Optional.empty();
@ -128,15 +123,8 @@ public class OutlineExtractorService {
log.info(String.format("Error occurred during position resolution for outline item on page %s with title %s: " + e, pageNumber, title)); log.info(String.format("Error occurred during position resolution for outline item on page %s with title %s: " + e, pageNumber, title));
} }
return Optional.of(new OutlineObjectTreeNode(new OutlineObject(title, return Optional.of(new OutlineObjectTreeNode(new OutlineObject(title, pageNumber, outlinePosition.orElse(new Point2D.Float(0, 0)), depth)));
pageNumber,
transformPointToPageCoords(outlinePosition, userSpaceToPageCoords), depth)));
}
private static Point2D transformPointToPageCoords(Optional<Point2D> outlinePosition, AffineTransform userSpaceToPageCoords) {
return outlinePosition.map(point -> userSpaceToPageCoords.transform(point, null)).orElse(null);
} }

View File

@ -1,34 +1,27 @@
package com.knecon.fforesight.service.layoutparser.processor.model.outline; package com.knecon.fforesight.service.layoutparser.processor.model.outline;
import java.awt.geom.Point2D; import java.awt.geom.Point2D;
import java.util.Optional;
import com.knecon.fforesight.service.layoutparser.processor.docstrum.model.BoundingBox; import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.Getter; import lombok.RequiredArgsConstructor;
import lombok.Setter;
@Data
@RequiredArgsConstructor
@AllArgsConstructor
public class OutlineObject { public class OutlineObject {
@Getter
private final String title; private final String title;
@Getter
private final int pageNumber; private final int pageNumber;
@Getter private Point2D point;
private final int treeDepth; private final int treeDepth;
private Point2D point; // java coordinates, (0, 0) is always top left
@Getter
@Setter
private boolean found; private boolean found;
public OutlineObject(String title, int pageNumber, Point2D point2D, int depth) { public OutlineObject(String title, int pageNumber, Point2D point2D, int depth) {
this.title = title; this(title, pageNumber, depth);
this.pageNumber = pageNumber;
this.treeDepth = depth;
this.point = point2D; this.point = point2D;
} }
@ -39,39 +32,4 @@ public class OutlineObject {
return "OutlineObject{" + "title='" + title + '\'' + '}'; return "OutlineObject{" + "title='" + title + '\'' + '}';
} }
public Optional<Point2D> getPoint() {
return Optional.ofNullable(point);
}
public boolean isAbove(BoundingBox boundingBox) {
if (point == null) {
return true;
}
return point.getY() <= boundingBox.getMaxY();
}
public double distance(BoundingBox boundingBox) {
if (point == null) {
return 0;
}
if (boundingBox.getBBox().contains(point)) {
return 0;
}
double deltaX = Math.min(Math.abs(boundingBox.getMinX() - point.getX()), Math.abs(boundingBox.getMaxX() - point.getX()));
double deltaY = Math.min(Math.abs(boundingBox.getMinY() - point.getY()), Math.abs(boundingBox.getMaxY() - point.getY()));
return Math.sqrt(deltaX * deltaX + deltaY * deltaY);
}
public void resetPoint() {
this.point = null;
}
} }

View File

@ -39,28 +39,4 @@ public class OutlineObjectTree {
} }
} }
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("OutlineObjectTree(\n");
for (OutlineObjectTreeNode node : rootNodes) {
buildString(node, sb, 1);
}
sb.append(")");
return sb.toString();
}
private void buildString(OutlineObjectTreeNode node, StringBuilder sb, int depth) {
for (int i = 0; i < depth; i++) {
sb.append(" ");
}
sb.append(node.getOutlineObject().getTitle()).append("\n");
for (OutlineObjectTreeNode child : node.getChildren()) {
buildString(child, sb, depth + 1);
}
}
} }

View File

@ -1,6 +1,5 @@
package com.knecon.fforesight.service.layoutparser.processor.model.outline; package com.knecon.fforesight.service.layoutparser.processor.model.outline;
import static com.knecon.fforesight.service.layoutparser.processor.model.PageBlockType.TABLE_OF_CONTENTS_HEADLINE;
import static com.knecon.fforesight.service.layoutparser.processor.model.PageBlockType.getHeadlineNumber; import static com.knecon.fforesight.service.layoutparser.processor.model.PageBlockType.getHeadlineNumber;
import java.util.ArrayList; import java.util.ArrayList;
@ -11,7 +10,6 @@ import java.util.TreeSet;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import com.knecon.fforesight.service.layoutparser.processor.model.ClassificationDocument;
import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPageBlock; import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPageBlock;
import io.micrometer.observation.annotation.Observed; import io.micrometer.observation.annotation.Observed;
@ -19,23 +17,21 @@ import lombok.extern.slf4j.Slf4j;
@Service @Service
@Slf4j @Slf4j
public class SectionTreeBuilderService { public class OutlineValidationService {
@Observed(name = "OutlineValidationService", contextualName = "create-toc") @Observed(name = "OutlineValidationService", contextualName = "create-toc")
public SectionTree createSectionTree(ClassificationDocument classificationDocument) { public TableOfContents createToC(List<TextPageBlock> headlines) {
List<TextPageBlock> headlines = extractHeadlines(classificationDocument); List<TableOfContentItem> mainSections = new ArrayList<>();
Map<Integer, TableOfContentItem> lastItemsPerDepth = new HashMap<>();
List<SectionTreeEntry> mainSections = new ArrayList<>(); TableOfContentItem last = null;
Map<Integer, SectionTreeEntry> lastItemsPerDepth = new HashMap<>();
SectionTreeEntry last = null;
TreeSet<Integer> depths = new TreeSet<>(); TreeSet<Integer> depths = new TreeSet<>();
for (TextPageBlock current : headlines) { for (TextPageBlock current : headlines) {
int currentDepth = getHeadlineNumber(current.getClassification()); int currentDepth = getHeadlineNumber(current.getClassification());
Integer parentDepth = depths.floor(currentDepth - 1); Integer parentDepth = depths.floor(currentDepth - 1);
var tocItem = new SectionTreeEntry(current); var tocItem = new TableOfContentItem(current);
if (parentDepth == null) { if (parentDepth == null) {
mainSections.add(tocItem); mainSections.add(tocItem);
@ -45,16 +41,14 @@ public class SectionTreeBuilderService {
} else { } else {
assert last != null; assert last != null;
int lastDepth = getHeadlineNumber(last.getHeadline().getClassification()); int lastDepth = getHeadlineNumber(last.getHeadline().getClassification());
if (last.getHeadline().getClassification().equals(TABLE_OF_CONTENTS_HEADLINE) && !current.getClassification().equals(TABLE_OF_CONTENTS_HEADLINE)) {
// headline after toc should always start a main section if (lastDepth < parentDepth) {
parentDepth = 1;
} else if (lastDepth < parentDepth) {
parentDepth = lastDepth; parentDepth = lastDepth;
} else if (lastDepth == currentDepth && last.getParent() != null) { } else if (lastDepth == currentDepth && last.getParent() != null) {
parentDepth = getHeadlineNumber(last.getParent().getHeadline().getClassification()); parentDepth = getHeadlineNumber(last.getParent().getHeadline().getClassification());
} }
SectionTreeEntry parent = lastItemsPerDepth.get(parentDepth); TableOfContentItem parent = lastItemsPerDepth.get(parentDepth);
parent.addChild(tocItem); parent.addChild(tocItem);
} }
@ -63,20 +57,7 @@ public class SectionTreeBuilderService {
depths.add(currentDepth); depths.add(currentDepth);
} }
return new SectionTree(mainSections); return new TableOfContents(mainSections);
}
private static List<TextPageBlock> extractHeadlines(ClassificationDocument classificationDocument) {
return classificationDocument.getPages()
.stream()
.flatMap(classificationPage -> classificationPage.getTextBlocks()
.stream()
.filter(tb -> tb instanceof TextPageBlock && tb.getClassification() != null && tb.getClassification().isHeadline())
.map(tb -> (TextPageBlock) tb))
.toList();
} }
} }

View File

@ -1,7 +1,6 @@
package com.knecon.fforesight.service.layoutparser.processor.model.outline; package com.knecon.fforesight.service.layoutparser.processor.model.outline;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections; import java.util.Collections;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
@ -23,28 +22,28 @@ import lombok.extern.slf4j.Slf4j;
@Slf4j @Slf4j
@Service @Service
public class SectionTreeEnhancementService { public class TOCEnrichmentService {
public void assignSectionBlocksAndImages(ClassificationDocument document) { public void assignSectionBlocksAndImages(ClassificationDocument document) {
SectionTree toc = document.getSectionTree(); TableOfContents toc = document.getTableOfContents();
Iterator<SectionTreeEntry> iterator = toc.iterator(); Iterator<TableOfContentItem> iterator = toc.iterator();
SectionTreeEntry currentTOCItem = null; TableOfContentItem currentTOCItem = null;
if (iterator.hasNext()) { if (iterator.hasNext()) {
currentTOCItem = iterator.next(); currentTOCItem = iterator.next();
} }
List<AbstractPageBlock> startBlocks = new ArrayList<>(); List<AbstractPageBlock> startBlocks = new ArrayList<>();
List<ClassifiedImage> startImages = new ArrayList<>(); List<ClassifiedImage> startImages = new ArrayList<>();
SectionTreeEntry currentSection = null; TableOfContentItem currentSection = null;
boolean foundFirstHeadline = false; boolean foundFirstHeadline = false;
List<ClassificationHeader> headers = new ArrayList<>(); List<ClassificationHeader> headers = new ArrayList<>();
List<ClassificationFooter> footers = new ArrayList<>(); List<ClassificationFooter> footers = new ArrayList<>();
TablePageBlock previousTable = null; TablePageBlock previousTable = null;
List<SectionTreeEntry> lastFoundTOCItems = new ArrayList<>(); List<TableOfContentItem> lastFoundTOCItems = new ArrayList<>();
for (ClassificationPage page : document.getPages()) { for (ClassificationPage page : document.getPages()) {
List<SectionTreeEntry> currentPageTOCItems = new ArrayList<>(); List<TableOfContentItem> currentPageTOCItems = new ArrayList<>();
List<TextPageBlock> header = new ArrayList<>(); List<TextPageBlock> header = new ArrayList<>();
List<TextPageBlock> footer = new ArrayList<>(); List<TextPageBlock> footer = new ArrayList<>();
for (AbstractPageBlock current : page.getTextBlocks()) { for (AbstractPageBlock current : page.getTextBlocks()) {
@ -101,7 +100,7 @@ public class SectionTreeEnhancementService {
Double xMax = null; Double xMax = null;
Double yMax = null; Double yMax = null;
for (SectionTreeEntry tocItem : lastFoundTOCItems) { for (TableOfContentItem tocItem : lastFoundTOCItems) {
var headline = tocItem.getHeadline(); var headline = tocItem.getHeadline();
if (headline.getPage() != page.getPageNumber()) { if (headline.getPage() != page.getPageNumber()) {
@ -169,10 +168,10 @@ public class SectionTreeEnhancementService {
} }
if (!startBlocks.isEmpty() || !startImages.isEmpty()) { if (!startBlocks.isEmpty() || !startImages.isEmpty()) {
SectionTreeEntry unassigned = new SectionTreeEntry(null); TableOfContentItem unassigned = new TableOfContentItem(null);
unassigned.setSectionBlocks(startBlocks); unassigned.setSectionBlocks(startBlocks);
unassigned.setImages(startImages); unassigned.setImages(startImages);
document.getSectionTree().getMainSections().add(0, unassigned); document.getTableOfContents().getMainSections().add(0, unassigned);
} }
document.setHeaders(headers); document.setHeaders(headers);
document.setFooters(footers); document.setFooters(footers);
@ -186,8 +185,12 @@ public class SectionTreeEnhancementService {
List<Cell> previousTableNonHeaderRow = getRowWithNonHeaderCells(previousTable); List<Cell> previousTableNonHeaderRow = getRowWithNonHeaderCells(previousTable);
List<Cell> tableNonHeaderRow = getRowWithNonHeaderCells(currentTable); List<Cell> tableNonHeaderRow = getRowWithNonHeaderCells(currentTable);
// Allow merging of tables if header row is separated from first logical non-header row // Allow merging of tables if header row is separated from first logical non-header row
if (previousTableNonHeaderRow.isEmpty() && previousTable.getRowCount() == 1 && previousTable.getRows().get(0).size() == tableNonHeaderRow.size()) { if (previousTableNonHeaderRow.isEmpty()
previousTableNonHeaderRow = previousTable.getRows().get(0) && previousTable.getRowCount() == 1
&& previousTable.getRows()
.get(0).size() == tableNonHeaderRow.size()) {
previousTableNonHeaderRow = previousTable.getRows()
.get(0)
.stream() .stream()
.map(cell -> { .map(cell -> {
Cell fakeCell = Cell.copy(cell); Cell fakeCell = Cell.copy(cell);
@ -198,7 +201,8 @@ public class SectionTreeEnhancementService {
} }
if (previousTableNonHeaderRow.size() == tableNonHeaderRow.size()) { if (previousTableNonHeaderRow.size() == tableNonHeaderRow.size()) {
for (int i = currentTable.getRowCount() - 1; i >= 0; i--) { // Non header rows are most likely at bottom of table for (int i = currentTable.getRowCount() - 1; i >= 0; i--) { // Non header rows are most likely at bottom of table
List<Cell> row = currentTable.getRows().get(i); List<Cell> row = currentTable.getRows()
.get(i);
if (row.size() == tableNonHeaderRow.size() && row.stream() if (row.size() == tableNonHeaderRow.size() && row.stream()
.allMatch(cell -> cell.getHeaderCells().isEmpty())) { .allMatch(cell -> cell.getHeaderCells().isEmpty())) {
for (int j = 0; j < row.size(); j++) { for (int j = 0; j < row.size(); j++) {
@ -221,15 +225,18 @@ public class SectionTreeEnhancementService {
return table.getRows() return table.getRows()
.stream() .stream()
.flatMap(Collection::stream) .flatMap(row -> row.stream()
.allMatch(cell -> cell.getHeaderCells().isEmpty()); .filter(cell -> !cell.getHeaderCells().isEmpty()))
.findAny().isEmpty();
} }
private List<Cell> getRowWithNonHeaderCells(TablePageBlock table) { private List<Cell> getRowWithNonHeaderCells(TablePageBlock table) {
for (int i = table.getRowCount() - 1; i >= 0; i--) { // Non header rows are most likely at bottom of table for (int i = table.getRowCount() - 1; i >= 0; i--) { // Non header rows are most likely at bottom of table
List<Cell> row = table.getRows().get(i); List<Cell> row = table.getRows()
.get(i);
if (row.size() == 1) { if (row.size() == 1) {
continue; continue;
} }

View File

@ -2,12 +2,10 @@ package com.knecon.fforesight.service.layoutparser.processor.model.outline;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.GenericSemanticNode;
import com.knecon.fforesight.service.layoutparser.processor.model.AbstractPageBlock; import com.knecon.fforesight.service.layoutparser.processor.model.AbstractPageBlock;
import com.knecon.fforesight.service.layoutparser.processor.model.PageBlockType; import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.AbstractSemanticNode;
import com.knecon.fforesight.service.layoutparser.processor.model.image.ClassifiedImage; import com.knecon.fforesight.service.layoutparser.processor.model.image.ClassifiedImage;
import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPageBlock; import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPageBlock;
@ -16,68 +14,52 @@ import lombok.EqualsAndHashCode;
@Data @Data
@EqualsAndHashCode(onlyExplicitlyIncluded = true) @EqualsAndHashCode(onlyExplicitlyIncluded = true)
public class SectionTreeEntry { public class TableOfContentItem {
public enum Type {
SECTION,
SUPER_SECTION,
TOC_SECTION
}
@EqualsAndHashCode.Include @EqualsAndHashCode.Include
private TextPageBlock headline; private TextPageBlock headline;
private List<SectionTreeEntry> children = new ArrayList<>(); private List<TableOfContentItem> children = new ArrayList<>();
private SectionTreeEntry parent; private TableOfContentItem parent;
private List<AbstractPageBlock> sectionBlocks = new ArrayList<>(); private List<AbstractPageBlock> sectionBlocks = new ArrayList<>();
private List<ClassifiedImage> images = new ArrayList<>(); private List<ClassifiedImage> images = new ArrayList<>();
private GenericSemanticNode section; private AbstractSemanticNode section;
public SectionTreeEntry(TextPageBlock headline) { public TableOfContentItem(TextPageBlock headline) {
this.headline = headline; this.headline = headline;
} }
public Type getType() { public void addChild(TableOfContentItem tableOfContentItem) {
if (!Objects.isNull(headline) && headline.getClassification().equals(PageBlockType.TABLE_OF_CONTENTS_HEADLINE)) { children.add(tableOfContentItem);
return Type.TOC_SECTION; tableOfContentItem.setParent(this);
}
if (children.isEmpty()) {
return Type.SECTION;
}
return Type.SUPER_SECTION;
} }
public void addChild(SectionTreeEntry sectionTreeEntry) { public TableOfContentItem getSiblingBefore() {
children.add(sectionTreeEntry);
sectionTreeEntry.setParent(this);
}
public SectionTreeEntry getSiblingBefore() {
if (parent != null) { if (parent != null) {
int index = parent.getChildren().indexOf(this); int index = parent.getChildren().indexOf(this);
if (index > 0) { if (index > 0) {
return parent.getChildren().get(index - 1); return parent.getChildren()
.get(index - 1);
} }
} }
return null; return null;
} }
public SectionTreeEntry getSiblingAfter() { public TableOfContentItem getSiblingAfter() {
if (parent != null) { if (parent != null) {
int index = parent.getChildren().indexOf(this); int index = parent.getChildren().indexOf(this);
if (index >= 0 && index < parent.getChildren().size() - 1) { if (index >= 0 && index < parent.getChildren().size() - 1) {
return parent.getChildren().get(index + 1); return parent.getChildren()
.get(index + 1);
} }
} }
return null; return null;
@ -89,7 +71,7 @@ public class SectionTreeEntry {
if (headline.equals(block)) { if (headline.equals(block)) {
return true; return true;
} }
for (SectionTreeEntry child : children) { for (TableOfContentItem child : children) {
if (child.contains(block)) { if (child.contains(block)) {
return true; return true;
} }
@ -98,12 +80,12 @@ public class SectionTreeEntry {
} }
public boolean contains(SectionTreeEntry tocItem) { public boolean contains(TableOfContentItem tocItem) {
if (this.equals(tocItem)) { if (this.equals(tocItem)) {
return true; return true;
} }
for (SectionTreeEntry child : children) { for (TableOfContentItem child : children) {
if (child.contains(tocItem)) { if (child.contains(tocItem)) {
return true; return true;
} }
@ -111,19 +93,17 @@ public class SectionTreeEntry {
return false; return false;
} }
public List<AbstractPageBlock> getNonEmptySectionBlocks() { public List<AbstractPageBlock> getNonEmptySectionBlocks() {
return sectionBlocks.stream() return sectionBlocks.stream().filter(pageBlock -> !pageBlock.isEmpty()).collect(Collectors.toList());
.filter(pageBlock -> !pageBlock.isEmpty())
.collect(Collectors.toList());
} }
@Override @Override
public String toString() { public String toString() {
return "OutlineObjectTreeNode{" + "textPageBlock=" + headline + '}'; return "OutlineObjectTreeNode{" + "textPageBlock=" + headline + '}';
} }
} }

View File

@ -14,12 +14,12 @@ import lombok.RequiredArgsConstructor;
@Data @Data
@RequiredArgsConstructor @RequiredArgsConstructor
public class SectionTree implements Iterable<SectionTreeEntry> { public class TableOfContents implements Iterable<TableOfContentItem> {
private List<SectionTreeEntry> mainSections = new ArrayList<>(); private List<TableOfContentItem> mainSections = new ArrayList<>();
public SectionTree(List<SectionTreeEntry> mainSections) { public TableOfContents(List<TableOfContentItem> mainSections) {
this.mainSections = mainSections; this.mainSections = mainSections;
} }
@ -28,36 +28,36 @@ public class SectionTree implements Iterable<SectionTreeEntry> {
public List<TextPageBlock> getAllTextPageBlocks() { public List<TextPageBlock> getAllTextPageBlocks() {
List<TextPageBlock> allTextPageBlocks = new ArrayList<>(); List<TextPageBlock> allTextPageBlocks = new ArrayList<>();
for (SectionTreeEntry item : mainSections) { for (TableOfContentItem item : mainSections) {
collectTextPageBlocks(item, allTextPageBlocks); collectTextPageBlocks(item, allTextPageBlocks);
} }
return allTextPageBlocks; return allTextPageBlocks;
} }
private void collectTextPageBlocks(SectionTreeEntry item, List<TextPageBlock> textPageBlocks) { private void collectTextPageBlocks(TableOfContentItem item, List<TextPageBlock> textPageBlocks) {
textPageBlocks.add(item.getHeadline()); textPageBlocks.add(item.getHeadline());
for (SectionTreeEntry child : item.getChildren()) { for (TableOfContentItem child : item.getChildren()) {
collectTextPageBlocks(child, textPageBlocks); collectTextPageBlocks(child, textPageBlocks);
} }
} }
public List<SectionTreeEntry> getAllTableOfContentItems() { public List<TableOfContentItem> getAllTableOfContentItems() {
List<SectionTreeEntry> allItems = new ArrayList<>(); List<TableOfContentItem> allItems = new ArrayList<>();
for (SectionTreeEntry item : mainSections) { for (TableOfContentItem item : mainSections) {
collectTableOfContentItems(item, allItems); collectTableOfContentItems(item, allItems);
} }
return allItems; return allItems;
} }
private void collectTableOfContentItems(SectionTreeEntry item, List<SectionTreeEntry> allItems) { private void collectTableOfContentItems(TableOfContentItem item, List<TableOfContentItem> allItems) {
allItems.add(item); allItems.add(item);
for (SectionTreeEntry child : item.getChildren()) { for (TableOfContentItem child : item.getChildren()) {
collectTableOfContentItems(child, allItems); collectTableOfContentItems(child, allItems);
} }
} }
@ -65,7 +65,7 @@ public class SectionTree implements Iterable<SectionTreeEntry> {
private boolean containsBlock(TextPageBlock block) { private boolean containsBlock(TextPageBlock block) {
for (SectionTreeEntry existingItem : this.getMainSections()) { for (TableOfContentItem existingItem : this.getMainSections()) {
if (existingItem.getHeadline().equals(block) || existingItem.contains(block)) { if (existingItem.getHeadline().equals(block) || existingItem.contains(block)) {
return true; return true;
} }
@ -74,9 +74,9 @@ public class SectionTree implements Iterable<SectionTreeEntry> {
} }
private boolean containsItem(SectionTreeEntry tocItem) { private boolean containsItem(TableOfContentItem tocItem) {
for (SectionTreeEntry existingItem : this.getMainSections()) { for (TableOfContentItem existingItem : this.getMainSections()) {
if (existingItem.equals(tocItem) || existingItem.contains(tocItem)) { if (existingItem.equals(tocItem) || existingItem.contains(tocItem)) {
return true; return true;
} }
@ -86,18 +86,18 @@ public class SectionTree implements Iterable<SectionTreeEntry> {
@Override @Override
public @NonNull Iterator<SectionTreeEntry> iterator() { public @NonNull Iterator<TableOfContentItem> iterator() {
return new SectionTreeEntryIterator(mainSections); return new TableOfContentItemIterator(mainSections);
} }
private static class SectionTreeEntryIterator implements Iterator<SectionTreeEntry> { private static class TableOfContentItemIterator implements Iterator<TableOfContentItem> {
private final Stack<Iterator<SectionTreeEntry>> stack = new Stack<>(); private final Stack<Iterator<TableOfContentItem>> stack = new Stack<>();
SectionTreeEntryIterator(List<SectionTreeEntry> mainSections) { TableOfContentItemIterator(List<TableOfContentItem> mainSections) {
stack.push(mainSections.iterator()); stack.push(mainSections.iterator());
} }
@ -112,10 +112,10 @@ public class SectionTree implements Iterable<SectionTreeEntry> {
@Override @Override
public SectionTreeEntry next() { public TableOfContentItem next() {
ensureStackTopIsCurrent(); ensureStackTopIsCurrent();
SectionTreeEntry currentItem = stack.peek().next(); TableOfContentItem currentItem = stack.peek().next();
if (currentItem.getChildren() != null && !currentItem.getChildren().isEmpty()) { if (currentItem.getChildren() != null && !currentItem.getChildren().isEmpty()) {
stack.push(currentItem.getChildren() stack.push(currentItem.getChildren()
.iterator()); .iterator());

View File

@ -9,7 +9,7 @@ import java.util.List;
import com.knecon.fforesight.service.layoutparser.processor.docstrum.model.BoundingBox; import com.knecon.fforesight.service.layoutparser.processor.docstrum.model.BoundingBox;
import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPageBlock; import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPageBlock;
import com.knecon.fforesight.service.layoutparser.processor.model.text.Word; import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPositionSequence;
import com.knecon.fforesight.service.layoutparser.processor.utils.TextNormalizationUtilities; import com.knecon.fforesight.service.layoutparser.processor.utils.TextNormalizationUtilities;
import lombok.Data; import lombok.Data;
@ -68,12 +68,12 @@ public class Cell extends BoundingBox {
StringBuilder sb = new StringBuilder(); StringBuilder sb = new StringBuilder();
Iterator<TextPageBlock> itty = textBlocks.iterator(); Iterator<TextPageBlock> itty = textBlocks.iterator();
Word previous = null; TextPositionSequence previous = null;
while (itty.hasNext()) { while (itty.hasNext()) {
TextPageBlock textBlock = itty.next(); TextPageBlock textBlock = itty.next();
for (Word word : textBlock.getWords()) { for (TextPositionSequence word : textBlock.getSequences()) {
if (previous != null) { if (previous != null) {
if (Math.abs(previous.getMaxYDirAdj() - word.getMaxYDirAdj()) > word.getTextHeight()) { if (Math.abs(previous.getMaxYDirAdj() - word.getMaxYDirAdj()) > word.getTextHeight()) {
sb.append('\n'); sb.append('\n');
@ -87,7 +87,7 @@ public class Cell extends BoundingBox {
} }
return TextNormalizationUtilities.cleanString(sb.toString()); return TextNormalizationUtilities.removeHyphenLineBreaks(sb.toString()).replaceAll("\n", " ").replaceAll(" {2}", " ");
} }

View File

@ -1,8 +0,0 @@
package com.knecon.fforesight.service.layoutparser.processor.model.text;
import com.knecon.fforesight.service.layoutparser.processor.model.AbstractPageBlock;
import com.knecon.fforesight.service.layoutparser.processor.model.ClassificationPage;
public record AbstractBlockOnPage(AbstractPageBlock block, ClassificationPage page) {
}

View File

@ -1,21 +0,0 @@
package com.knecon.fforesight.service.layoutparser.processor.model.text;
import com.knecon.fforesight.service.layoutparser.processor.model.FloatFrequencyCounter;
import lombok.AccessLevel;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.experimental.FieldDefaults;
@Getter
@NoArgsConstructor
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
public class FrequencyCounters {
FloatFrequencyCounter lineHeightFrequencyCounter = new FloatFrequencyCounter();
FloatFrequencyCounter fontSizeFrequencyCounter = new FloatFrequencyCounter();
FloatFrequencyCounter spaceFrequencyCounter = new FloatFrequencyCounter();
StringFrequencyCounter fontFrequencyCounter = new StringFrequencyCounter();
StringFrequencyCounter styleFrequencyCounter = new StringFrequencyCounter();
}

View File

@ -1,107 +0,0 @@
package com.knecon.fforesight.service.layoutparser.processor.model.text;
import java.util.List;
import java.util.Optional;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.experimental.FieldDefaults;
@AllArgsConstructor
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
public class ListIdentifier {
public static final Pattern NUMBER_WITH_DOT = Pattern.compile("^\\s*([1-9]{1,4})\\.\\s+");
public static final Pattern NUMBER_IN_PARENTHESES = Pattern.compile("^\\s*\\(([1-9]{1,4})\\)\\s+");
enum Format {
NUMBER_WITH_DOT,
NUMBER_IN_PARENTHESES
}
Format format;
@Getter
Word word;
@Getter
int page;
int representation;
public static Optional<ListIdentifier> parse(TextPageBlock textPageBlock, int page) {
return parse(textPageBlock.getWords().subList(0, Math.min(5, textPageBlock.getWords().size())), page);
}
public static Optional<ListIdentifier> parse(List<Word> sequences, int page) {
StringBuilder sb = new StringBuilder();
for (Word sequence : sequences) {
sb.append(sequence.toString());
sb.append(" ");
}
sb.replace(sb.length() - 1, sb.length(), "");
String text = sb.toString();
Matcher numberMatcher = NUMBER_WITH_DOT.matcher(text);
if (numberMatcher.find()) {
Optional<Integer> representation = parseInteger(numberMatcher.group(1));
if (representation.isPresent()) {
return Optional.of(new ListIdentifier(Format.NUMBER_WITH_DOT, sequences.get(0), page, representation.get()));
}
}
Matcher parenthesisMatcher = NUMBER_IN_PARENTHESES.matcher(text);
if (parenthesisMatcher.find()) {
Optional<Integer> representation = parseInteger(parenthesisMatcher.group(1));
if (representation.isPresent()) {
return Optional.of(new ListIdentifier(Format.NUMBER_IN_PARENTHESES, sequences.get(0), page, representation.get()));
}
}
return Optional.empty();
}
private static Optional<Integer> parseInteger(String text) {
try {
return Optional.of(Integer.parseInt(text));
} catch (NumberFormatException e) {
return Optional.empty();
}
}
public static boolean isInOrder(List<ListIdentifier> listIdentifiers) {
if (listIdentifiers.size() <= 1) {
return true;
}
for (int i = 1; i < listIdentifiers.size(); i++) {
ListIdentifier current = listIdentifiers.get(i);
ListIdentifier previous = listIdentifiers.get(i - 1);
if (current.format != previous.format) {
return false;
}
if (current.representation <= previous.representation) {
return false;
}
if (!current.word.intersectsXDirAdj(previous.word, 2)) {
return false;
}
if (current.page == previous.page && !current.word.isBelowDirAdj(previous.word)) {
return false;
}
if (current.page < previous.page) {
return false;
}
}
return true;
}
}
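
For reference, a small sketch, not part of the changeset, of the list-marker formats the removed ListIdentifier recognized. The patterns are copied verbatim from the listing above; the sample strings are made up, and only java.util.regex is used.

import java.util.regex.Pattern;
public class ListMarkerSketch {
    static final Pattern NUMBER_WITH_DOT = Pattern.compile("^\\s*([1-9]{1,4})\\.\\s+");
    static final Pattern NUMBER_IN_PARENTHESES = Pattern.compile("^\\s*\\(([1-9]{1,4})\\)\\s+");
    public static void main(String[] args) {
        System.out.println(NUMBER_WITH_DOT.matcher("3. Scope of delivery").find());   // true
        System.out.println(NUMBER_IN_PARENTHESES.matcher("(12) Definitions").find()); // true
        System.out.println(NUMBER_WITH_DOT.matcher("3.1 Sub-section").find());        // false: no whitespace after the dot
    }
}

The removed isInOrder check additionally required identical formats, strictly increasing numbers and top-to-bottom placement across pages, which this sketch does not cover.
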

View File

@ -7,8 +7,6 @@ import org.apache.pdfbox.text.TextPosition;
import com.knecon.fforesight.service.layoutparser.processor.docstrum.model.TextBoundingBox; import com.knecon.fforesight.service.layoutparser.processor.docstrum.model.TextBoundingBox;
import com.knecon.fforesight.service.layoutparser.processor.docstrum.utils.FastAtan2; import com.knecon.fforesight.service.layoutparser.processor.docstrum.utils.FastAtan2;
import com.knecon.fforesight.service.layoutparser.processor.utils.CoordinateTransforms;
import com.knecon.fforesight.service.layoutparser.processor.utils.PageInformation;
import lombok.AccessLevel; import lombok.AccessLevel;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;

View File

@ -2,7 +2,6 @@ package com.knecon.fforesight.service.layoutparser.processor.model.text;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import com.knecon.fforesight.service.layoutparser.processor.utils.TextNormalizationUtilities; import com.knecon.fforesight.service.layoutparser.processor.utils.TextNormalizationUtilities;
import lombok.Getter; import lombok.Getter;
@ -10,18 +9,18 @@ import lombok.Getter;
@Getter @Getter
public class SearchableText { public class SearchableText {
private final List<Word> sequences = new ArrayList<>(); private final List<TextPositionSequence> sequences = new ArrayList<>();
public void add(Word word) { public void add(TextPositionSequence textPositionSequence) {
sequences.add(word); sequences.add(textPositionSequence);
} }
public void addAll(List<Word> words) { public void addAll(List<TextPositionSequence> textPositionSequences) {
sequences.addAll(words); sequences.addAll(textPositionSequences);
} }
@ -32,14 +31,18 @@ public class SearchableText {
} }
public static String buildString(List<Word> sequences) { public static String buildString(List<TextPositionSequence> sequences) {
StringBuilder sb = new StringBuilder(); StringBuilder sb = new StringBuilder();
for (Word word : sequences) { for (TextPositionSequence word : sequences) {
sb.append(word); sb.append(word);
sb.append(' '); sb.append(' ');
} }
return TextNormalizationUtilities.cleanString(sb.toString()); String text = sb.toString();
text = TextNormalizationUtilities.removeHyphenLineBreaks(text);
text = TextNormalizationUtilities.removeLineBreaks(text);
text = TextNormalizationUtilities.removeRepeatingWhitespaces(text);
return text;
} }
} }

View File

@ -9,14 +9,10 @@ public class StringFrequencyCounter {
@Getter @Getter
private final Map<String, Integer> countPerValue = new HashMap<>(); private final Map<String, Integer> countPerValue = new HashMap<>();
boolean changed;
String mostPopularCache;
public void add(String value) { public void add(String value) {
changed = true;
if (!countPerValue.containsKey(value)) { if (!countPerValue.containsKey(value)) {
countPerValue.put(value, 1); countPerValue.put(value, 1);
} else { } else {
@ -27,8 +23,6 @@ public class StringFrequencyCounter {
public void addAll(Map<String, Integer> otherCounter) { public void addAll(Map<String, Integer> otherCounter) {
changed = true;
for (Map.Entry<String, Integer> entry : otherCounter.entrySet()) { for (Map.Entry<String, Integer> entry : otherCounter.entrySet()) {
if (countPerValue.containsKey(entry.getKey())) { if (countPerValue.containsKey(entry.getKey())) {
countPerValue.put(entry.getKey(), countPerValue.get(entry.getKey()) + entry.getValue()); countPerValue.put(entry.getKey(), countPerValue.get(entry.getKey()) + entry.getValue());
@ -41,18 +35,13 @@ public class StringFrequencyCounter {
public String getMostPopular() { public String getMostPopular() {
if (changed || mostPopularCache == null) { Map.Entry<String, Integer> mostPopular = null;
Map.Entry<String, Integer> mostPopular = null; for (Map.Entry<String, Integer> entry : countPerValue.entrySet()) {
for (Map.Entry<String, Integer> entry : countPerValue.entrySet()) { if (mostPopular == null || entry.getValue() > mostPopular.getValue()) {
if (mostPopular == null || entry.getValue() >= mostPopular.getValue()) { mostPopular = entry;
mostPopular = entry;
}
} }
mostPopularCache = mostPopular != null ? mostPopular.getKey() : null;
changed = false;
} }
return mostPopular != null ? mostPopular.getKey() : null;
return mostPopularCache;
} }
} }

View File

@ -1,7 +0,0 @@
package com.knecon.fforesight.service.layoutparser.processor.model.text;
import com.knecon.fforesight.service.layoutparser.processor.model.ClassificationPage;
public record TextBlockOnPage(TextPageBlock textBlock, ClassificationPage page) {
}

View File

@ -2,11 +2,11 @@ package com.knecon.fforesight.service.layoutparser.processor.model.text;
import java.awt.geom.Rectangle2D; import java.awt.geom.Rectangle2D;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.List; import java.util.List;
import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnore;
import com.knecon.fforesight.service.layoutparser.processor.model.AbstractPageBlock; import com.knecon.fforesight.service.layoutparser.processor.model.AbstractPageBlock;
import com.knecon.fforesight.service.layoutparser.processor.model.FloatFrequencyCounter;
import com.knecon.fforesight.service.layoutparser.processor.model.PageBlockType; import com.knecon.fforesight.service.layoutparser.processor.model.PageBlockType;
import com.knecon.fforesight.service.layoutparser.processor.utils.RectangleTransformations; import com.knecon.fforesight.service.layoutparser.processor.utils.RectangleTransformations;
import com.knecon.fforesight.service.layoutparser.processor.utils.TextNormalizationUtilities; import com.knecon.fforesight.service.layoutparser.processor.utils.TextNormalizationUtilities;
@ -25,59 +25,56 @@ import lombok.NoArgsConstructor;
public class TextPageBlock extends AbstractPageBlock { public class TextPageBlock extends AbstractPageBlock {
@Builder.Default @Builder.Default
private List<Word> words = new ArrayList<>(); private List<TextPositionSequence> sequences = new ArrayList<>();
@Builder.Default
private FrequencyCounters frequencyCounters = new FrequencyCounters();
private Rectangle2D bBoxDirAdj; private Rectangle2D bBoxDirAdj;
private boolean underlined; private String mostPopularWordFont;
private String mostPopularWordStyle;
private double mostPopularWordFontSize;
private double mostPopularWordHeight;
private double mostPopularWordSpaceWidth;
private double highestFontSize;
private PageBlockType classification; private PageBlockType classification;
private boolean toDuplicate; private boolean toDuplicate;
private String text;
private boolean changed;
public TextPageBlock(List<TextPositionSequence> sequences) {
public TextPageBlock(List<Word> words) { this.sequences = sequences;
if (!sequences.isEmpty()) {
this.words = new ArrayList<>(words); calculateFrequencyCounters();
this.frequencyCounters = new FrequencyCounters();
if (!words.isEmpty()) {
addToFrequencyCounters(words);
} }
calculateBBox(); calculateBBox();
} }
public List<Word> getWords() {
return Collections.unmodifiableList(words);
}
public TextDirection getDir() { public TextDirection getDir() {
return words.get(0).getDir(); return sequences.get(0).getDir();
} }
private void calculateBBox() { private void calculateBBox() {
if (words == null) { if (sequences == null) {
this.bBox = new Rectangle2D.Double(); this.bBox = new Rectangle2D.Double();
this.bBoxPdf = new Rectangle2D.Double(); this.bBoxPdf = new Rectangle2D.Double();
this.bBoxDirAdj = new Rectangle2D.Double(); this.bBoxDirAdj = new Rectangle2D.Double();
return; return;
} }
this.bBoxDirAdj = words.stream() this.bBoxDirAdj = sequences.stream()
.map(Word::getBBoxDirAdj) .map(TextPositionSequence::getBBoxDirAdj)
.collect(RectangleTransformations.collectBBox()); .collect(RectangleTransformations.collectBBox());
setToBBoxOfComponents(words); setToBBoxOfComponents(sequences);
} }
@ -99,8 +96,8 @@ public class TextPageBlock extends AbstractPageBlock {
throw new IllegalArgumentException("Cannot merge textBlocks on different pages."); throw new IllegalArgumentException("Cannot merge textBlocks on different pages.");
} }
List<Word> sequences = textBlocksToMerge.stream() List<TextPositionSequence> sequences = textBlocksToMerge.stream()
.map(TextPageBlock::getWords) .map(TextPageBlock::getSequences)
.flatMap(java.util.Collection::stream) .flatMap(java.util.Collection::stream)
.toList(); .toList();
sequences = new ArrayList<>(sequences); sequences = new ArrayList<>(sequences);
@ -109,27 +106,38 @@ public class TextPageBlock extends AbstractPageBlock {
} }
private void addToFrequencyCounters(List<Word> sequences) { private void calculateFrequencyCounters() {
for (Word wordBlock : sequences) { FloatFrequencyCounter lineHeightFrequencyCounter = new FloatFrequencyCounter();
FloatFrequencyCounter fontSizeFrequencyCounter = new FloatFrequencyCounter();
FloatFrequencyCounter spaceFrequencyCounter = new FloatFrequencyCounter();
StringFrequencyCounter fontFrequencyCounter = new StringFrequencyCounter();
StringFrequencyCounter styleFrequencyCounter = new StringFrequencyCounter();
for (TextPositionSequence wordBlock : sequences) {
lineHeightFrequencyCounter.add(wordBlock.getTextHeight());
fontSizeFrequencyCounter.add(wordBlock.getFontSize());
spaceFrequencyCounter.add(wordBlock.getSpaceWidth());
fontFrequencyCounter.add(wordBlock.getFont());
styleFrequencyCounter.add(wordBlock.getFontStyle());
frequencyCounters.getLineHeightFrequencyCounter().add(wordBlock.getTextHeight());
frequencyCounters.getFontSizeFrequencyCounter().add(wordBlock.getFontSize());
frequencyCounters.getSpaceFrequencyCounter().add(wordBlock.getSpaceWidth());
frequencyCounters.getFontFrequencyCounter().add(wordBlock.getFont());
frequencyCounters.getStyleFrequencyCounter().add(wordBlock.getFontStyle());
} }
setUnderlined(this.words.stream() setMostPopularWordFont(fontFrequencyCounter.getMostPopular());
.allMatch(Word::isUnderline)); setMostPopularWordStyle(styleFrequencyCounter.getMostPopular());
setMostPopularWordFontSize(fontSizeFrequencyCounter.getMostPopular());
setMostPopularWordHeight(lineHeightFrequencyCounter.getMostPopular());
setMostPopularWordSpaceWidth(spaceFrequencyCounter.getMostPopular());
setHighestFontSize(fontSizeFrequencyCounter.getHighest());
} }
public TextPageBlock union(Word r) { public TextPageBlock union(TextPositionSequence r) {
TextPageBlock union = this.copy(); TextPageBlock union = this.copy();
union.add(r); union.getSequences().add(r);
addToFrequencyCounters(List.of(r)); calculateFrequencyCounters();
calculateBBox(); calculateBBox();
return union; return union;
} }
@ -138,50 +146,51 @@ public class TextPageBlock extends AbstractPageBlock {
public TextPageBlock union(TextPageBlock r) { public TextPageBlock union(TextPageBlock r) {
TextPageBlock union = this.copy(); TextPageBlock union = this.copy();
union.addAll(r.getWords()); union.getSequences().addAll(r.getSequences());
addToFrequencyCounters(r.getWords()); calculateFrequencyCounters();
calculateBBox(); calculateBBox();
return union; return union;
} }
public void add(TextPageBlock textPageBlock) { public void add(TextPageBlock r) {
changed = true; sequences.addAll(r.getSequences());
words.addAll(textPageBlock.getWords()); calculateFrequencyCounters();
addToFrequencyCounters(textPageBlock.getWords());
calculateBBox(); calculateBBox();
} }
public void add(Word word) { public void add(TextPositionSequence r) {
changed = true; sequences.add(r);
words.add(word); calculateFrequencyCounters();
addToFrequencyCounters(List.of(word));
calculateBBox();
}
public void addAll(List<Word> words) {
changed = true;
this.words.addAll(words);
addToFrequencyCounters(words);
calculateBBox(); calculateBBox();
} }
public TextPageBlock copy() { public TextPageBlock copy() {
return new TextPageBlock(new ArrayList<>(words)); return new TextPageBlock(new ArrayList<>(sequences));
} }
@Override @Override
public String toString() { public String toString() {
return getText(); StringBuilder builder = new StringBuilder();
for (int i = 0; i < sequences.size(); i++) {
String sequenceAsString = sequences.get(i).toString();
// Fix for missing Whitespace. This is recognized in getSequences method. See PDFTextStripper Line 1730.
if (i != 0 && sequences.get(i - 1).charAt(sequences.get(i - 1).length() - 1) != ' ' && sequenceAsString.charAt(0) != ' ') {
builder.append(' ');
}
builder.append(sequenceAsString);
}
return builder.toString();
} }
@ -189,36 +198,30 @@ public class TextPageBlock extends AbstractPageBlock {
@JsonIgnore @JsonIgnore
public String getText() { public String getText() {
if (text == null || changed) { StringBuilder sb = new StringBuilder();
StringBuilder sb = new StringBuilder(); TextPositionSequence previous = null;
for (TextPositionSequence word : sequences) {
Word previous = null; if (previous != null) {
for (Word word : words) { if (Math.abs(previous.getMaxYDirAdj() - word.getMaxYDirAdj()) > word.getTextHeight()) {
if (previous != null) { sb.append('\n');
if (Math.abs(previous.getMaxYDirAdj() - word.getMaxYDirAdj()) > word.getTextHeight()) { } else {
sb.append('\n'); sb.append(' ');
} else {
sb.append(' ');
}
} }
sb.append(word.toString());
previous = word;
} }
sb.append(word.toString());
text = TextNormalizationUtilities.removeHyphenLinebreaks(sb.toString()); previous = word;
changed = false;
} }
return text; return TextNormalizationUtilities.removeHyphenLineBreaks(sb.toString());
} }
public int getNumberOfLines() { public int getNumberOfLines() {
int numberOfLines = 1; int numberOfLines = 1;
Word previous = null; TextPositionSequence previous = null;
for (Word word : words) { for (TextPositionSequence word : sequences) {
if (previous != null) { if (previous != null) {
if (word.getMaxYDirAdj() - previous.getMaxYDirAdj() > word.getTextHeight()) { if (word.getMaxYDirAdj() - previous.getMaxYDirAdj() > word.getTextHeight()) {
numberOfLines++; numberOfLines++;
@ -230,47 +233,10 @@ public class TextPageBlock extends AbstractPageBlock {
} }
public String getMostPopularWordFont() {
return frequencyCounters.getFontFrequencyCounter().getMostPopular();
}
public String getMostPopularWordStyle() {
return frequencyCounters.getStyleFrequencyCounter().getMostPopular();
}
public double getMostPopularWordFontSize() {
return frequencyCounters.getFontSizeFrequencyCounter().getMostPopular();
}
public double getMostPopularWordHeight() {
return frequencyCounters.getLineHeightFrequencyCounter().getMostPopular();
}
public double getMostPopularWordSpaceWidth() {
return frequencyCounters.getSpaceFrequencyCounter().getMostPopular();
}
public double getHighestFontSize() {
Double highest = frequencyCounters.getFontSizeFrequencyCounter().getHighest();
return highest == null ? 0 : highest;
}
@Override @Override
public boolean isEmpty() { public boolean isEmpty() {
return words.isEmpty(); return sequences.isEmpty();
} }
} }

View File

@ -0,0 +1,190 @@
package com.knecon.fforesight.service.layoutparser.processor.model.text;
import static com.knecon.fforesight.service.layoutparser.processor.model.text.RedTextPosition.HEIGHT_PADDING;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.stream.Collectors;
import org.apache.pdfbox.text.TextPosition;
import com.knecon.fforesight.service.layoutparser.processor.docstrum.model.TextBoundingBox;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@EqualsAndHashCode(onlyExplicitlyIncluded = true, callSuper = true) // needs the bbox to be unique
public class TextPositionSequence extends TextBoundingBox implements CharSequence {
public static final String STANDARD = "standard";
public static final String BOLD_ITALIC = "bold, italic";
public static final String BOLD = "bold";
public static final String ITALIC = "italic";
@EqualsAndHashCode.Include
private int page;
@EqualsAndHashCode.Include
@Builder.Default
private List<RedTextPosition> textPositions = new ArrayList<>();
private boolean isParagraphStart;
private boolean strikethrough;
private boolean underline;
public TextPositionSequence(List<TextPosition> textPositions, int pageNumber, boolean isParagraphStart) {
this.textPositions = textPositions.stream()
.map(RedTextPosition::fromTextPosition)
.collect(Collectors.toList());
this.page = pageNumber;
this.isParagraphStart = isParagraphStart;
calculateBBox();
}
private void calculateBBox() {
setToBBoxOfComponents(getTextPositions());
}
public TextPositionSequence(List<RedTextPosition> textPositions, int page) {
this.textPositions = textPositions;
this.page = page;
calculateBBox();
}
@Override
public int length() {
return textPositions.size();
}
@Override
public char charAt(int index) {
RedTextPosition textPosition = textPositionAt(index);
String text = textPosition.getUnicode();
return text.charAt(0);
}
public char charAt(int index, boolean caseInSensitive) {
RedTextPosition textPosition = textPositionAt(index);
String text = textPosition.getUnicode();
return caseInSensitive ? text.toLowerCase(Locale.ROOT).charAt(0) : text.charAt(0);
}
@Override
public TextPositionSequence subSequence(int start, int end) {
var textPositionSequence = new TextPositionSequence();
textPositionSequence.textPositions = textPositions.subList(start, end);
textPositionSequence.page = page;
textPositionSequence.dir = dir;
textPositionSequence.setToBBoxOfComponents(getTextPositions());
return textPositionSequence;
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder(length());
for (int i = 0; i < length(); i++) {
builder.append(charAt(i));
}
return builder.toString();
}
public RedTextPosition textPositionAt(int index) {
return textPositions.get(index);
}
public void add(TextPositionSequence textPositionSequence, RedTextPosition textPosition) {
this.textPositions.add(textPosition);
this.page = textPositionSequence.getPage();
calculateBBox();
}
public void add(TextPosition textPosition) {
this.textPositions.add(RedTextPosition.fromTextPosition(textPosition));
calculateBBox();
}
public double getTextHeightNoPadding() {
return textPositions.get(0).getHeightDirAdj();
}
public double getTextHeight() {
return textPositions.get(0).getHeightDirAdj() + HEIGHT_PADDING;
}
public String getFont() {
if (textPositions.get(0).getFontName() == null) {
return "none";
}
return textPositions.get(0).getFontName().toLowerCase(Locale.ROOT).replaceAll(",bold", "").replaceAll(",italic", "");
}
public String getFontStyle() {
if (textPositions.get(0).getFontName() == null) {
return STANDARD;
}
String lowercaseFontName = textPositions.get(0).getFontName().toLowerCase(Locale.ROOT);
if (lowercaseFontName.contains(BOLD) && lowercaseFontName.contains(ITALIC)) {
return BOLD_ITALIC;
} else if (lowercaseFontName.contains(BOLD)) {
return BOLD;
} else if (lowercaseFontName.contains(ITALIC)) {
return ITALIC;
} else {
return STANDARD;
}
}
public float getFontSize() {
return textPositions.get(0).getFontSizeInPt();
}
public float getSpaceWidth() {
return textPositions.get(0).getWidthOfSpace();
}
}
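Since TextPositionSequence implements CharSequence, downstream code can run java.util.regex directly on positioned text and the matched prefix cut out with subSequence() still carries its page number and bounding box. A minimal sketch under that assumption; the helper name and the section-number pattern are illustrative and not part of this change:

import java.util.Optional;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

class SectionNumberSniffer {

    private static final Pattern SECTION_NUMBER = Pattern.compile("^\\d+(\\.\\d+)*");

    // Returns the leading "3.1.4"-style number of a word, still carrying its page and bbox.
    static Optional<TextPositionSequence> leadingSectionNumber(TextPositionSequence word) {
        Matcher matcher = SECTION_NUMBER.matcher(word); // works because word is a CharSequence
        if (!matcher.find()) {
            return Optional.empty();
        }
        TextPositionSequence number = word.subSequence(0, matcher.end()); // keeps page + bounding box
        return Optional.of(number);
    }
}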

View File

@ -1,36 +0,0 @@
package com.knecon.fforesight.service.layoutparser.processor.model.text;
import java.util.Comparator;
import java.util.HashMap;
import com.knecon.fforesight.service.layoutparser.processor.services.classification.NumberWord;
public class TocNumberComparator implements Comparator<NumberWord> {
private HashMap<NumberWord, TextBlockOnPage> lookup;
public TocNumberComparator(HashMap<NumberWord, TextBlockOnPage> lookup) {
this.lookup = lookup;
}
@Override
public int compare(NumberWord number1, NumberWord number2) {
int page1 = lookup.get(number1).page().getPageNumber();
int page2 = lookup.get(number2).page().getPageNumber();
if (page1 != page2) {
return Integer.compare(page1, page2);
}
if (number1.word().getY() != number2.word().getY()) {
return Double.compare(number1.word().getY(), number2.word().getY());
}
return Integer.compare(number1.number(), number2.number());
}
}

View File

@ -1,272 +0,0 @@
package com.knecon.fforesight.service.layoutparser.processor.model.text;
import static com.knecon.fforesight.service.layoutparser.processor.model.text.RedTextPosition.HEIGHT_PADDING;
import java.awt.geom.AffineTransform;
import java.awt.geom.Rectangle2D;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Objects;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.apache.pdfbox.text.TextPosition;
import com.knecon.fforesight.service.layoutparser.processor.docstrum.model.Character;
import com.knecon.fforesight.service.layoutparser.processor.docstrum.model.TextBoundingBox;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@SuppressWarnings("pmd")
public class Word extends TextBoundingBox implements CharSequence {
public static final String STANDARD = "standard";
public static final String BOLD_ITALIC = "bold, italic";
public static final String BOLD = "bold";
public static final String ITALIC = "italic";
public static final Pattern FONT_CLEANER = Pattern.compile(",bold|,italic");
private int page;
@Builder.Default
private List<Character> characters = new ArrayList<>();
private boolean isParagraphStart;
private boolean strikethrough;
private boolean underline;
private Integer hashcodeCache;
public Word(List<TextPosition> textPositions, int pageNumber, boolean isParagraphStart) {
this.characters = textPositions.stream()
.map(RedTextPosition::fromTextPosition)
.map(Character::new)
.collect(Collectors.toList());
this.page = pageNumber;
this.isParagraphStart = isParagraphStart;
calculateBBoxAndHashcode();
}
private void calculateBBoxAndHashcode() {
setToBBoxOfComponents(getTextPositions());
hashcodeCache = null;
}
public Word(List<Character> textPositions, int page) {
this.characters = new ArrayList<>(textPositions);
this.page = page;
calculateBBoxAndHashcode();
}
@Override
public int length() {
return characters.size();
}
@Override
public char charAt(int index) {
RedTextPosition textPosition = textPositionAt(index);
String text = textPosition.getUnicode();
return text.charAt(0);
}
public char charAt(int index, boolean caseInSensitive) {
RedTextPosition textPosition = textPositionAt(index);
String text = textPosition.getUnicode();
return caseInSensitive ? text.toLowerCase(Locale.ROOT).charAt(0) : text.charAt(0);
}
@Override
public Word subSequence(int start, int end) {
var textPositionSequence = new Word();
textPositionSequence.characters = characters.subList(start, end);
textPositionSequence.page = page;
textPositionSequence.dir = dir;
textPositionSequence.setToBBoxOfComponents(getTextPositions());
return textPositionSequence;
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder(length());
for (int i = 0; i < length(); i++) {
builder.append(charAt(i));
}
return builder.toString();
}
public RedTextPosition textPositionAt(int index) {
return characters.get(index).getTextPosition();
}
public void add(Word word, RedTextPosition textPosition) {
this.characters.add(new Character(textPosition));
this.page = word.getPage();
calculateBBoxAndHashcode();
}
public void add(Character current) {
characters.add(current);
calculateBBoxAndHashcode();
}
public void add(TextPosition textPosition) {
add(new Character(RedTextPosition.fromTextPosition(textPosition)));
}
public double getTextHeightNoPadding() {
return characters.get(0).getTextPosition().getHeightDirAdj();
}
public double getTextHeight() {
return characters.get(0).getTextPosition().getHeightDirAdj() + HEIGHT_PADDING;
}
public String getFont() {
if (characters.get(0).getTextPosition().getFontName() == null) {
return "none";
}
return FONT_CLEANER.matcher(characters.get(0).getTextPosition().getFontName().toLowerCase(Locale.ROOT)).replaceAll("");
}
public String getFontStyle() {
if (characters.get(0).getTextPosition().getFontName() == null) {
return STANDARD;
}
String lowercaseFontName = characters.get(0).getTextPosition().getFontName().toLowerCase(Locale.ROOT);
if (lowercaseFontName.contains(BOLD) && lowercaseFontName.contains(ITALIC)) {
return BOLD_ITALIC;
} else if (lowercaseFontName.contains(BOLD)) {
return BOLD;
} else if (lowercaseFontName.contains(ITALIC)) {
return ITALIC;
} else {
return STANDARD;
}
}
public float getFontSize() {
return characters.get(0).getTextPosition().getFontSizeInPt();
}
public float getSpaceWidth() {
return characters.get(0).getTextPosition().getWidthOfSpace();
}
public boolean equals(final Object o) {
// auto-generated with lombok
if (o == this) {
return true;
}
if (!(o instanceof Word other)) {
return false;
}
if (!other.canEqual((Object) this)) {
return false;
}
if (!super.equals(o)) {
return false;
}
if (this.getPage() != other.getPage()) {
return false;
}
final Object this$textPositions = this.getTextPositions();
final Object other$textPositions = other.getTextPositions();
if (!Objects.equals(this$textPositions, other$textPositions)) {
return false;
}
return Objects.equals(this.getHashcodeCache(), other.getHashcodeCache());
}
protected boolean canEqual(final Object other) {return other instanceof Word;}
public int hashCode() {
if (hashcodeCache == null) {
hashcodeCache = hashcodeCalculation();
}
return hashcodeCache;
}
private int hashcodeCalculation() {
final int PRIME = 59;
int result = super.hashCode();
result = result * PRIME + this.getPage();
final Object $textPositions = this.getTextPositions();
result = result * PRIME + ($textPositions == null ? 43 : $textPositions.hashCode());
return result;
}
private List<RedTextPosition> getTextPositions() {
return characters.stream()
.map(Character::getTextPosition)
.toList();
}
public void transform(AffineTransform rotateInstance) {
for (RedTextPosition textPosition : getTextPositions()) {
Rectangle2D exactDirAdjCoordinates = rotateInstance.createTransformedShape(textPosition.getBBoxDirAdj()).getBounds2D();
textPosition.setBBoxDirAdj(exactDirAdjCoordinates);
}
calculateBBoxAndHashcode();
}
}

View File

@ -9,9 +9,9 @@ import java.util.Map;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.ImageType;
import com.knecon.fforesight.service.layoutparser.processor.model.AbstractPageBlock; import com.knecon.fforesight.service.layoutparser.processor.model.AbstractPageBlock;
import com.knecon.fforesight.service.layoutparser.processor.model.ClassificationPage; import com.knecon.fforesight.service.layoutparser.processor.model.ClassificationPage;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.ImageType;
import com.knecon.fforesight.service.layoutparser.processor.model.image.ClassifiedImage; import com.knecon.fforesight.service.layoutparser.processor.model.image.ClassifiedImage;
import com.knecon.fforesight.service.layoutparser.processor.python_api.model.image.ImageServiceResponse; import com.knecon.fforesight.service.layoutparser.processor.python_api.model.image.ImageServiceResponse;

View File

@ -8,7 +8,7 @@ import java.util.Map;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.ImageType; import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.ImageType;
import com.knecon.fforesight.service.layoutparser.processor.model.image.ClassifiedImage; import com.knecon.fforesight.service.layoutparser.processor.model.image.ClassifiedImage;
import com.knecon.fforesight.service.layoutparser.processor.python_api.model.table.VisualLayoutParsingBox; import com.knecon.fforesight.service.layoutparser.processor.python_api.model.table.VisualLayoutParsingBox;
import com.knecon.fforesight.service.layoutparser.processor.python_api.model.table.VisualLayoutParsingResponse; import com.knecon.fforesight.service.layoutparser.processor.python_api.model.table.VisualLayoutParsingResponse;
@ -79,7 +79,7 @@ public class VisualLayoutParsingAdapter {
ClassifiedImage signature = new ClassifiedImage(new Rectangle2D.Float(t.getBox().getX1(), ClassifiedImage signature = new ClassifiedImage(new Rectangle2D.Float(t.getBox().getX1(),
t.getBox().getY1(), t.getBox().getY1(),
t.getBox().getX2() - t.getBox().getX1(), t.getBox().getX2() - t.getBox().getX1(),
t.getBox().getY2() - t.getBox().getY1()), ImageType.SIGNATURE, true, false, false, pageNumber, ""); t.getBox().getY2() - t.getBox().getY1()), ImageType.SIGNATURE, true, false, false, pageNumber,"");
signatures.add(signature); signatures.add(signature);
} }

View File

@ -23,11 +23,11 @@ public class DividingColumnDetectionService {
public List<Rectangle2D> detectColumns(PageContents pageContents) { public List<Rectangle2D> detectColumns(PageContents pageContents) {
if (pageContents.getSortedWords().size() < 2) { if (pageContents.getSortedTextPositionSequences().size() < 2) {
return List.of(pageContents.getCropBox()); return List.of(pageContents.getCropBox());
} }
GapInformation linesWithGapInformation = GapDetectionService.findGapsInLines(pageContents.getSortedWords(), pageContents.getCropBox()); GapInformation linesWithGapInformation = GapDetectionService.findGapsInLines(pageContents.getSortedTextPositionSequences(), pageContents.getCropBox());
return detectColumnsFromLines(linesWithGapInformation.getXGaps(), pageContents.getCropBox()); return detectColumnsFromLines(linesWithGapInformation.getXGaps(), pageContents.getCropBox());
} }

View File

@ -5,7 +5,7 @@ import java.util.LinkedList;
import java.util.List; import java.util.List;
import com.knecon.fforesight.service.layoutparser.processor.model.GapInformation; import com.knecon.fforesight.service.layoutparser.processor.model.GapInformation;
import com.knecon.fforesight.service.layoutparser.processor.model.text.Word; import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPositionSequence;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;
import lombok.experimental.UtilityClass; import lombok.experimental.UtilityClass;
@ -18,23 +18,23 @@ public class GapDetectionService {
private static final double NEW_LINE_FACTOR = 0.2; private static final double NEW_LINE_FACTOR = 0.2;
public static GapInformation findGapsInLines(List<Word> sortedWords, Rectangle2D mainBodyTextFrame) { public static GapInformation findGapsInLines(List<TextPositionSequence> sortedTextPositionSequences, Rectangle2D mainBodyTextFrame) {
if (sortedWords.isEmpty()) { if (sortedTextPositionSequences.isEmpty()) {
return new GapInformation(); return new GapInformation();
} }
final double avgTextPositionHeight = getAvgTextPositionHeight(sortedWords); final double avgTextPositionHeight = getAvgTextPositionHeight(sortedTextPositionSequences);
XGapsContext xGapContext = XGapsContext.init(mainBodyTextFrame); XGapsContext xGapContext = XGapsContext.init(mainBodyTextFrame);
YGapsContext yGapContext = YGapsContext.init(mainBodyTextFrame); YGapsContext yGapContext = YGapsContext.init(mainBodyTextFrame);
var previousTextPosition = sortedWords.get(0); var previousTextPosition = sortedTextPositionSequences.get(0);
Rectangle2D rectangle = toRectangle2D(previousTextPosition); Rectangle2D rectangle = toRectangle2D(previousTextPosition);
xGapContext.addGapFromLeftEdgeOfMainBody(rectangle); xGapContext.addGapFromLeftEdgeOfMainBody(rectangle);
for (Word currentTextPosition : sortedWords.subList(1, sortedWords.size())) { for (TextPositionSequence currentTextPosition : sortedTextPositionSequences.subList(1, sortedTextPositionSequences.size())) {
double yDifference = Math.abs(currentTextPosition.getMaxYDirAdj() - previousTextPosition.getMaxYDirAdj()); double yDifference = Math.abs(currentTextPosition.getMaxYDirAdj() - previousTextPosition.getMaxYDirAdj());
double xGap = Math.abs(previousTextPosition.getMaxXDirAdj() - currentTextPosition.getXDirAdj()); double xGap = Math.abs(previousTextPosition.getMaxXDirAdj() - currentTextPosition.getXDirAdj());
@ -59,14 +59,14 @@ public class GapDetectionService {
} }
previousTextPosition = currentTextPosition; previousTextPosition = currentTextPosition;
} }
xGapContext.addGapToRightEdgeOfMainBody(toRectangle2D(sortedWords.get(sortedWords.size() - 1))); xGapContext.addGapToRightEdgeOfMainBody(toRectangle2D(sortedTextPositionSequences.get(sortedTextPositionSequences.size() - 1)));
xGapContext.gapsPerLine.add(xGapContext.gapsInCurrentLine); xGapContext.gapsPerLine.add(xGapContext.gapsInCurrentLine);
return new GapInformation(xGapContext.gapsPerLine, yGapContext.gapsPerLine); return new GapInformation(xGapContext.gapsPerLine, yGapContext.gapsPerLine);
} }
private static Rectangle2D toRectangle2D(Word textPosition) { private static Rectangle2D toRectangle2D(TextPositionSequence textPosition) {
return mirrorY(textPosition.getBBox()); return mirrorY(textPosition.getBBox());
} }
@ -87,18 +87,18 @@ public class GapDetectionService {
} }
private static void assertAllTextPositionsHaveSameDir(List<Word> words) { private static void assertAllTextPositionsHaveSameDir(List<TextPositionSequence> textPositionSequences) {
assert words.stream() assert textPositionSequences.stream()
.map(Word::getDir) .map(TextPositionSequence::getDir)
.allMatch(a -> a.equals(words.get(0).getDir())); .allMatch(a -> a.equals(textPositionSequences.get(0).getDir()));
} }
private static double getAvgTextPositionHeight(List<Word> words) { private static double getAvgTextPositionHeight(List<TextPositionSequence> textPositionSequences) {
return words.stream() return textPositionSequences.stream()
.mapToDouble(Word::getHeight).average().orElseThrow(); .mapToDouble(TextPositionSequence::getHeight).average().orElseThrow();
} }
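Hypothetical glue code showing how the GapInformation produced by findGapsInLines is consumed by the call sites visible elsewhere in this change (dividing-column detection and invisible-table detection); the class and variable names are illustrative only:

import java.awt.geom.Rectangle2D;
import java.awt.geom.RectangularShape;
import java.util.List;

class GapUsageSketch {

    // sortedSequences and frame are assumed to come from PageContentExtractor /
    // MainBodyTextFrameExtractionService, as shown in the other hunks of this change.
    static List<Double> columnCandidateXCoords(List<TextPositionSequence> sortedSequences, Rectangle2D frame) {
        GapInformation gaps = GapDetectionService.findGapsInLines(sortedSequences, frame);
        List<Rectangle2D> gapsAcrossLines = GapsAcrossLinesService.detectXGapsAcrossLines(gaps, frame);
        return gapsAcrossLines.stream().map(RectangularShape::getCenterX).toList();
    }
}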

View File

@ -7,17 +7,17 @@ import java.util.List;
import com.knecon.fforesight.service.layoutparser.processor.model.GapInformation; import com.knecon.fforesight.service.layoutparser.processor.model.GapInformation;
import com.knecon.fforesight.service.layoutparser.processor.model.LineInformation; import com.knecon.fforesight.service.layoutparser.processor.model.LineInformation;
import com.knecon.fforesight.service.layoutparser.processor.model.text.Word; import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPositionSequence;
import lombok.experimental.UtilityClass; import lombok.experimental.UtilityClass;
@UtilityClass @UtilityClass
public class InvisibleTableDetectionService { public class InvisibleTableDetectionService {
public List<List<Rectangle2D>> detectTable(List<Word> words, Rectangle2D tableBBox) { public List<List<Rectangle2D>> detectTable(List<TextPositionSequence> textPositionSequences, Rectangle2D tableBBox) {
LineInformation lineInformation = LineDetectionService.calculateLineInformation(words); LineInformation lineInformation = LineDetectionService.calculateLineInformation(textPositionSequences);
GapInformation gaps = GapDetectionService.findGapsInLines(words, tableBBox); GapInformation gaps = GapDetectionService.findGapsInLines(textPositionSequences, tableBBox);
List<Rectangle2D> gapsAcrossLines = GapsAcrossLinesService.detectXGapsAcrossLines(gaps, tableBBox); List<Rectangle2D> gapsAcrossLines = GapsAcrossLinesService.detectXGapsAcrossLines(gaps, tableBBox);
List<Double> columnXCoords = gapsAcrossLines.stream().map(RectangularShape::getCenterX).toList(); List<Double> columnXCoords = gapsAcrossLines.stream().map(RectangularShape::getCenterX).toList();
int colCount = gapsAcrossLines.size(); int colCount = gapsAcrossLines.size();

View File

@ -7,7 +7,7 @@ import java.util.List;
import com.knecon.fforesight.service.layoutparser.processor.model.GapInformation; import com.knecon.fforesight.service.layoutparser.processor.model.GapInformation;
import com.knecon.fforesight.service.layoutparser.processor.model.LineInformation; import com.knecon.fforesight.service.layoutparser.processor.model.LineInformation;
import com.knecon.fforesight.service.layoutparser.processor.utils.RectangleTransformations; import com.knecon.fforesight.service.layoutparser.processor.utils.RectangleTransformations;
import com.knecon.fforesight.service.layoutparser.processor.model.text.Word; import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPositionSequence;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;
import lombok.Getter; import lombok.Getter;
@ -19,37 +19,37 @@ public class LineDetectionService {
private static final double X_GAP_FACTOR = 1; // multiplied with average text height, determines the minimum distance of gaps in lines private static final double X_GAP_FACTOR = 1; // multiplied with average text height, determines the minimum distance of gaps in lines
public LineInformation calculateLineInformation(List<Word> sortedWords) { public LineInformation calculateLineInformation(List<TextPositionSequence> sortedTextPositionSequences) {
if (sortedWords.isEmpty()) { if (sortedTextPositionSequences.isEmpty()) {
return LineFactory.init().build(); return LineFactory.init().build();
} }
return buildLineInformation(sortedWords); return buildLineInformation(sortedTextPositionSequences);
} }
public List<List<Rectangle2D>> findLinesWithGaps(List<Word> sortedWords, Rectangle2D mainBodyTextFrame) { public List<List<Rectangle2D>> findLinesWithGaps(List<TextPositionSequence> sortedTextPositionSequences, Rectangle2D mainBodyTextFrame) {
return calculateLineInformation(sortedWords).getBBoxWithGapsByLines(); return calculateLineInformation(sortedTextPositionSequences).getBBoxWithGapsByLines();
} }
public List<List<Word>> orderByLines(List<Word> sortedWords, Rectangle2D mainBodyTextFrame) { public List<List<TextPositionSequence>> orderByLines(List<TextPositionSequence> sortedTextPositionSequences, Rectangle2D mainBodyTextFrame) {
return calculateLineInformation(sortedWords).getSequencesByLines(); return calculateLineInformation(sortedTextPositionSequences).getSequencesByLines();
} }
private static LineInformation buildLineInformation(List<Word> sortedWords) { private static LineInformation buildLineInformation(List<TextPositionSequence> sortedTextPositionSequences) {
final double avgTextPositionHeight = getAvgTextPositionHeight(sortedWords); final double avgTextPositionHeight = getAvgTextPositionHeight(sortedTextPositionSequences);
LineFactory lineFactory = LineFactory.init(); LineFactory lineFactory = LineFactory.init();
var previousTextPosition = sortedWords.get(0); var previousTextPosition = sortedTextPositionSequences.get(0);
lineFactory.addToCurrentLine(previousTextPosition); lineFactory.addToCurrentLine(previousTextPosition);
for (Word currentTextPosition : sortedWords.subList(1, sortedWords.size())) { for (TextPositionSequence currentTextPosition : sortedTextPositionSequences.subList(1, sortedTextPositionSequences.size())) {
if (isNewLine(currentTextPosition, previousTextPosition, avgTextPositionHeight) || isSplitByOrientation(currentTextPosition, previousTextPosition)) { if (isNewLine(currentTextPosition, previousTextPosition, avgTextPositionHeight) || isSplitByOrientation(currentTextPosition, previousTextPosition)) {
lineFactory.startNewLine(); lineFactory.startNewLine();
} else if (isXGap(currentTextPosition, previousTextPosition, avgTextPositionHeight)) { } else if (isXGap(currentTextPosition, previousTextPosition, avgTextPositionHeight)) {
@ -63,25 +63,25 @@ public class LineDetectionService {
} }
private static double getAvgTextPositionHeight(List<Word> words) { private static double getAvgTextPositionHeight(List<TextPositionSequence> textPositionSequences) {
return words.stream().mapToDouble(Word::getHeight).average().orElseThrow(); return textPositionSequences.stream().mapToDouble(TextPositionSequence::getHeight).average().orElseThrow();
} }
private static boolean isXGap(Word currentTextPosition, Word previousTextPosition, double avgTextPositionHeight) { private static boolean isXGap(TextPositionSequence currentTextPosition, TextPositionSequence previousTextPosition, double avgTextPositionHeight) {
return Math.abs(previousTextPosition.getMaxXDirAdj() - currentTextPosition.getXDirAdj()) > (avgTextPositionHeight * X_GAP_FACTOR); return Math.abs(previousTextPosition.getMaxXDirAdj() - currentTextPosition.getXDirAdj()) > (avgTextPositionHeight * X_GAP_FACTOR);
} }
private static boolean isSplitByOrientation(Word currentTextPosition, Word previousTextPosition) { private static boolean isSplitByOrientation(TextPositionSequence currentTextPosition, TextPositionSequence previousTextPosition) {
return !previousTextPosition.getDir().equals(currentTextPosition.getDir()); return !previousTextPosition.getDir().equals(currentTextPosition.getDir());
} }
private static boolean isNewLine(Word currentTextPosition, Word previousTextPosition, double avgTextPositionHeight) { private static boolean isNewLine(TextPositionSequence currentTextPosition, TextPositionSequence previousTextPosition, double avgTextPositionHeight) {
return Math.abs(previousTextPosition.getYDirAdj() - currentTextPosition.getYDirAdj()) > avgTextPositionHeight; return Math.abs(previousTextPosition.getYDirAdj() - currentTextPosition.getYDirAdj()) > avgTextPositionHeight;
} }
@ -96,13 +96,13 @@ public class LineDetectionService {
List<List<Rectangle2D>> bBoxWithGapsByLines; List<List<Rectangle2D>> bBoxWithGapsByLines;
List<Rectangle2D> bBoxWithGapsInCurrentLine; List<Rectangle2D> bBoxWithGapsInCurrentLine;
List<List<List<Word>>> sequencesWithGapsByLines; List<List<List<TextPositionSequence>>> sequencesWithGapsByLines;
List<List<Word>> sequencesWithGapsInCurrentLine; List<List<TextPositionSequence>> sequencesWithGapsInCurrentLine;
List<Word> currentSequencesWithoutGaps; List<TextPositionSequence> currentSequencesWithoutGaps;
List<List<Word>> sequencesByLines; List<List<TextPositionSequence>> sequencesByLines;
List<Word> sequencesInCurrentLine; List<TextPositionSequence> sequencesInCurrentLine;
List<List<Rectangle2D>> xGaps; List<List<Rectangle2D>> xGaps;
List<List<Rectangle2D>> yGaps; List<List<Rectangle2D>> yGaps;
@ -116,14 +116,14 @@ public class LineDetectionService {
List<Rectangle2D> bBoxWithGapsInCurrentLine = new LinkedList<>(); List<Rectangle2D> bBoxWithGapsInCurrentLine = new LinkedList<>();
bBoxWithGapsByLines.add(bBoxWithGapsInCurrentLine); bBoxWithGapsByLines.add(bBoxWithGapsInCurrentLine);
List<List<List<Word>>> sequencesWithGapsByLines = new LinkedList<>(); List<List<List<TextPositionSequence>>> sequencesWithGapsByLines = new LinkedList<>();
List<List<Word>> sequencesWithGapsInCurrentLine = new LinkedList<>(); List<List<TextPositionSequence>> sequencesWithGapsInCurrentLine = new LinkedList<>();
sequencesWithGapsByLines.add(sequencesWithGapsInCurrentLine); sequencesWithGapsByLines.add(sequencesWithGapsInCurrentLine);
List<Word> currentSequencesWithoutGaps = new LinkedList<>(); List<TextPositionSequence> currentSequencesWithoutGaps = new LinkedList<>();
sequencesWithGapsInCurrentLine.add(currentSequencesWithoutGaps); sequencesWithGapsInCurrentLine.add(currentSequencesWithoutGaps);
List<List<Word>> sequencesByLines = new LinkedList<>(); List<List<TextPositionSequence>> sequencesByLines = new LinkedList<>();
List<Word> sequencesInCurrentLine = new LinkedList<>(); List<TextPositionSequence> sequencesInCurrentLine = new LinkedList<>();
sequencesByLines.add(sequencesInCurrentLine); sequencesByLines.add(sequencesInCurrentLine);
return new LineFactory(lineBBox, return new LineFactory(lineBBox,
@ -178,13 +178,13 @@ public class LineDetectionService {
} }
private Rectangle2D textPositionBBox(List<Word> words) { private Rectangle2D textPositionBBox(List<TextPositionSequence> textPositionSequences) {
return RectangleTransformations.rectangle2DBBox(words.stream().map(Word::getBBox).toList()); return RectangleTransformations.rectangle2DBBox(textPositionSequences.stream().map(TextPositionSequence::getBBox).toList());
} }
public void addToCurrentLine(Word current) { public void addToCurrentLine(TextPositionSequence current) {
sequencesInCurrentLine.add(current); sequencesInCurrentLine.add(current);
currentSequencesWithoutGaps.add(current); currentSequencesWithoutGaps.add(current);
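For orientation, a compact restatement of the two thresholds used above with invented numbers; it mirrors isNewLine and isXGap but is a sketch, not part of the service:

class LineHeuristicSketch {

    private static final double X_GAP_FACTOR = 1; // same meaning as in LineDetectionService

    // prevY/curY are direction-adjusted Y coordinates, prevMaxX/curX the X extents.
    static String classify(double prevY, double curY, double prevMaxX, double curX, double avgGlyphHeight) {
        if (Math.abs(prevY - curY) > avgGlyphHeight) {
            return "new line";                                    // isNewLine
        }
        if (Math.abs(prevMaxX - curX) > avgGlyphHeight * X_GAP_FACTOR) {
            return "gap inside the current line";                 // isXGap
        }
        return "same line, no gap";
    }
}
// With avgGlyphHeight = 10: classify(100, 112, 0, 0, 10)   -> "new line"                     (12 > 10)
//                           classify(100, 101, 50, 64, 10) -> "gap inside the current line"  (14 > 10)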

View File

@ -13,7 +13,7 @@ import org.apache.pdfbox.pdmodel.PDPage;
import org.springframework.core.io.ClassPathResource; import org.springframework.core.io.ClassPathResource;
import com.knecon.fforesight.service.layoutparser.processor.model.PageContents; import com.knecon.fforesight.service.layoutparser.processor.model.PageContents;
import com.knecon.fforesight.service.layoutparser.processor.model.text.Word; import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPositionSequence;
import com.knecon.fforesight.service.layoutparser.processor.services.parsing.PDFLinesTextStripper; import com.knecon.fforesight.service.layoutparser.processor.services.parsing.PDFLinesTextStripper;
import com.knecon.fforesight.service.layoutparser.processor.utils.RectangleTransformations; import com.knecon.fforesight.service.layoutparser.processor.utils.RectangleTransformations;
@ -40,7 +40,7 @@ public class PageContentExtractor {
stripper.setPdpage(pdPage); stripper.setPdpage(pdPage);
stripper.getText(pdDocument); stripper.getText(pdDocument);
Map<Float, List<Word>> sortedTextPositionSequencesPerDir = stripper.getWords() Map<Float, List<TextPositionSequence>> sortedTextPositionSequencesPerDir = stripper.getTextPositionSequences()
.stream() .stream()
.collect(Collectors.groupingBy(textPositionSequence -> textPositionSequence.getDir().getDegrees())); .collect(Collectors.groupingBy(textPositionSequence -> textPositionSequence.getDir().getDegrees()));
@ -57,7 +57,7 @@ public class PageContentExtractor {
} }
public List<Word> sortByDirAccordingToPageRotation(Map<Float, List<Word>> sortedTextPositionSequencesPerDir, int rotation) { public List<TextPositionSequence> sortByDirAccordingToPageRotation(Map<Float, List<TextPositionSequence>> sortedTextPositionSequencesPerDir, int rotation) {
LinkedList<Float> sortedKeys = new LinkedList<>(sortedTextPositionSequencesPerDir.keySet().stream().sorted().toList()); LinkedList<Float> sortedKeys = new LinkedList<>(sortedTextPositionSequencesPerDir.keySet().stream().sorted().toList());

View File

@ -14,9 +14,9 @@ public class PageInformationService {
public PageInformation build(PageContents pageContents) { public PageInformation build(PageContents pageContents) {
LineInformation lineInformation = LineDetectionService.calculateLineInformation(pageContents.getSortedWords()); LineInformation lineInformation = LineDetectionService.calculateLineInformation(pageContents.getSortedTextPositionSequences());
Rectangle2D mainBodyTextFrame = MainBodyTextFrameExtractionService.calculateMainBodyTextFrame(lineInformation); Rectangle2D mainBodyTextFrame = MainBodyTextFrameExtractionService.calculateMainBodyTextFrame(lineInformation);
GapInformation gapInformation = GapDetectionService.findGapsInLines(pageContents.getSortedWords(), mainBodyTextFrame); GapInformation gapInformation = GapDetectionService.findGapsInLines(pageContents.getSortedTextPositionSequences(), mainBodyTextFrame);
return new PageInformation(pageContents, lineInformation, mainBodyTextFrame, gapInformation); return new PageInformation(pageContents, lineInformation, mainBodyTextFrame, gapInformation);
} }

View File

@ -6,11 +6,10 @@ import java.util.stream.Stream;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.Document;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.NodeType;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.SemanticNode;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.SimplifiedSectionText; import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.SimplifiedSectionText;
import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.SimplifiedText; import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.SimplifiedText;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.Document;
import com.knecon.fforesight.service.layoutparser.processor.model.graph.nodes.SemanticNode;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
@ -24,29 +23,18 @@ public class SimplifiedSectionTextService {
.stream() .stream()
.map(this::toSimplifiedSectionText) .map(this::toSimplifiedSectionText)
.toList(); .toList();
List<SimplifiedSectionText> simplifiedHeadersList = document.streamAllSubNodesOfType(NodeType.HEADER) List<SimplifiedSectionText> simplifiedHeadersList = document.getHeaders()
.stream()
.map(this::toSimplifiedSectionText) .map(this::toSimplifiedSectionText)
.toList(); .toList();
List<SimplifiedSectionText> simplifiedFootersList = document.streamAllSubNodesOfType(NodeType.FOOTER) List<SimplifiedSectionText> simplifiedFootersList = document.getFooters()
.stream()
.map(this::toSimplifiedSectionText) .map(this::toSimplifiedSectionText)
.toList(); .toList();
List<SimplifiedSectionText> simplifiedText = Stream.of(simplifiedMainSectionsList, simplifiedHeadersList, simplifiedFootersList) List<SimplifiedSectionText> simplifiedText = Stream.of(simplifiedMainSectionsList, simplifiedHeadersList, simplifiedFootersList)
.flatMap(List::stream) .flatMap(List::stream)
.collect(Collectors.toList()); .collect(Collectors.toList());
return SimplifiedText.builder() return SimplifiedText.builder().numberOfPages(document.getNumberOfPages()).sectionTexts(simplifiedText).build();
.numberOfPages(document.getNumberOfPages())
.sectionTexts(simplifiedText)
.mainSectionNumbers(document.getAllSections()
.stream()
.map(this::getSectionNumber)
.toList())
.headerSectionNumbers(document.streamAllSubNodesOfType(NodeType.HEADER)
.map(this::getSectionNumber)
.toList())
.footerSectionNumbers(document.streamAllSubNodesOfType(NodeType.FOOTER)
.map(this::getSectionNumber)
.toList())
.build();
} }
@ -61,13 +49,4 @@ public class SimplifiedSectionTextService {
.build(); .build();
} }
private String getSectionNumber(SemanticNode semanticNode) {
return semanticNode.getTreeId()
.stream()
.map(String::valueOf)
.collect(Collectors.joining("."));
}
} }

View File

@ -5,7 +5,7 @@ import java.util.List;
import com.knecon.fforesight.service.layoutparser.processor.model.table.CleanRulings; import com.knecon.fforesight.service.layoutparser.processor.model.table.CleanRulings;
import com.knecon.fforesight.service.layoutparser.processor.model.table.Ruling; import com.knecon.fforesight.service.layoutparser.processor.model.table.Ruling;
import com.knecon.fforesight.service.layoutparser.processor.model.text.TextDirection; import com.knecon.fforesight.service.layoutparser.processor.model.text.TextDirection;
import com.knecon.fforesight.service.layoutparser.processor.model.text.Word; import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPositionSequence;
import lombok.experimental.UtilityClass; import lombok.experimental.UtilityClass;
@ -17,9 +17,9 @@ public class TextRulingsClassifier {
private final static double TEXT_BBOX_THRESHOLD_FACTOR = 0.15; // multiplied with text width then subtracted from word width. If ruling covers this width, it is considered as strikethrough/underline. private final static double TEXT_BBOX_THRESHOLD_FACTOR = 0.15; // multiplied with text width then subtracted from word width. If ruling covers this width, it is considered as strikethrough/underline.
public static void classifyUnderlinedAndStrikethroughText(List<Word> words, CleanRulings cleanRulings) { public static void classifyUnderlinedAndStrikethroughText(List<TextPositionSequence> words, CleanRulings cleanRulings) {
for (Word word : words) { for (TextPositionSequence word : words) {
if (word.getDir().equals(TextDirection.ZERO) || word.getDir().equals(TextDirection.HALF_CIRCLE)) { if (word.getDir().equals(TextDirection.ZERO) || word.getDir().equals(TextDirection.HALF_CIRCLE)) {
handleHorizontalText(cleanRulings, word); handleHorizontalText(cleanRulings, word);
} else { } else {
@ -29,7 +29,7 @@ public class TextRulingsClassifier {
} }
private static void handleVerticalText(CleanRulings cleanRulings, Word word) { private static void handleVerticalText(CleanRulings cleanRulings, TextPositionSequence word) {
float lowerY = (float) (word.getBBoxPdf().getMinY() + TEXT_BBOX_THRESHOLD_FACTOR * word.getWidth()); float lowerY = (float) (word.getBBoxPdf().getMinY() + TEXT_BBOX_THRESHOLD_FACTOR * word.getWidth());
float upperY = (float) (word.getBBoxPdf().getMaxY() - TEXT_BBOX_THRESHOLD_FACTOR * word.getWidth()); float upperY = (float) (word.getBBoxPdf().getMaxY() - TEXT_BBOX_THRESHOLD_FACTOR * word.getWidth());
@ -63,7 +63,7 @@ public class TextRulingsClassifier {
} }
private static void handleHorizontalText(CleanRulings cleanRulings, Word word) { private static void handleHorizontalText(CleanRulings cleanRulings, TextPositionSequence word) {
float leftX = (float) (word.getBBoxPdf().getMinX() + TEXT_BBOX_THRESHOLD_FACTOR * word.getWidth()); float leftX = (float) (word.getBBoxPdf().getMinX() + TEXT_BBOX_THRESHOLD_FACTOR * word.getWidth());
float rightX = (float) (word.getBBoxPdf().getMaxX() - TEXT_BBOX_THRESHOLD_FACTOR * word.getWidth()); float rightX = (float) (word.getBBoxPdf().getMaxX() - TEXT_BBOX_THRESHOLD_FACTOR * word.getWidth());
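A worked instance of the coverage threshold above, with made-up coordinates: for a horizontal word spanning x = 50..150 pt (width 100 pt) and TEXT_BBOX_THRESHOLD_FACTOR = 0.15, a ruling counts as strikethrough or underline only if it reaches from at most leftX = 50 + 15 = 65 to at least rightX = 150 - 15 = 135, i.e. it must cover the middle 70 % of the word. A small sketch of just that span computation, assuming the word width equals its PDF bounding-box width:

import java.awt.geom.Rectangle2D;

class RulingCoverageSketch {

    private static final double TEXT_BBOX_THRESHOLD_FACTOR = 0.15;

    // Returns {leftX, rightX}: the horizontal span a ruling must cover for a horizontal word.
    static double[] requiredCoverage(Rectangle2D wordBBoxPdf) {
        double width = wordBBoxPdf.getWidth();
        double leftX = wordBBoxPdf.getMinX() + TEXT_BBOX_THRESHOLD_FACTOR * width;
        double rightX = wordBBoxPdf.getMaxX() - TEXT_BBOX_THRESHOLD_FACTOR * width;
        return new double[] { leftX, rightX };
    }
}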

View File

@ -7,44 +7,23 @@ import java.util.List;
import java.util.ListIterator; import java.util.ListIterator;
import java.util.Locale; import java.util.Locale;
import org.apache.commons.text.similarity.LevenshteinDistance; import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.LayoutEngine; import com.knecon.fforesight.service.layoutparser.internal.api.data.redaction.LayoutEngine;
import com.knecon.fforesight.service.layoutparser.processor.model.ClassificationDocument;
import com.knecon.fforesight.service.layoutparser.processor.model.ClassificationPage; import com.knecon.fforesight.service.layoutparser.processor.model.ClassificationPage;
import com.knecon.fforesight.service.layoutparser.processor.model.PageBlockType; import com.knecon.fforesight.service.layoutparser.processor.model.PageBlockType;
import com.knecon.fforesight.service.layoutparser.processor.model.SectionIdentifier; import com.knecon.fforesight.service.layoutparser.processor.model.SectionIdentifier;
import com.knecon.fforesight.service.layoutparser.processor.model.outline.OutlineObject; import com.knecon.fforesight.service.layoutparser.processor.model.outline.OutlineObject;
import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPageBlock; import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPageBlock;
import com.knecon.fforesight.service.layoutparser.processor.model.text.Word; import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPositionSequence;
import com.knecon.fforesight.service.layoutparser.processor.utils.PageInformation;
import com.knecon.fforesight.service.layoutparser.processor.utils.TextNormalizationUtilities;
import lombok.Data; import lombok.Data;
@Service @Service
public class BlockificationPostprocessingService { public class BlockificationPostprocessingService {
private static final float STRING_SIMILARITY_THRESHOLD = 0.1f; private static final float BLOCK_TO_OUTLINE_DISTANCE_THRESHOLD = 5.0f;
public void findHeadlinesFromOutline(ClassificationDocument classificationDocument, int pageNumber, ClassificationPage classificationPage, PageInformation pageInformation) {
OutlineObject lastProcessedOutlineObject = null;
List<OutlineObject> outlineObjects = classificationDocument.getOutlineObjectTree().getOutlineObjectsPerPage().getOrDefault(pageNumber, new ArrayList<>());
OutlineObject notFoundOutlineObject = null;
if (lastProcessedOutlineObject != null && !lastProcessedOutlineObject.isFound()) {
lastProcessedOutlineObject.resetPoint();
notFoundOutlineObject = lastProcessedOutlineObject;
}
if (!outlineObjects.isEmpty()) {
classificationPage.setOutlineObjects(outlineObjects);
lastProcessedOutlineObject = sanitizeOutlineBlocks(classificationPage, notFoundOutlineObject);
}
classificationDocument.getLayoutDebugLayer().addOutlineObjects(outlineObjects, pageInformation);
}
public OutlineObject sanitizeOutlineBlocks(ClassificationPage classificationPage, OutlineObject notFoundOutlineObject) { public OutlineObject sanitizeOutlineBlocks(ClassificationPage classificationPage, OutlineObject notFoundOutlineObject) {
@ -55,36 +34,38 @@ public class BlockificationPostprocessingService {
return null; return null;
} }
float pageHeight = classificationPage.getPageHeight();
ListIterator<OutlineObject> outlineObjectListIterator = outlineObjects.listIterator(); ListIterator<OutlineObject> outlineObjectListIterator = outlineObjects.listIterator();
if (notFoundOutlineObject != null) { if (notFoundOutlineObject != null) {
OutlineProcessionContext notFoundOutlineObjectProcessionContext = new OutlineProcessionContext(notFoundOutlineObject); OutlineProcessionContext notFoundOutlineObjectProcessionContext = new OutlineProcessionContext(notFoundOutlineObject);
processTextBlocks(getTextPageBlocks(classificationPage), notFoundOutlineObjectProcessionContext); processTextBlocks(getTextPageBlocks(classificationPage), pageHeight, notFoundOutlineObjectProcessionContext);
OutlineObject firstOutlineObject = null; OutlineObject firstOutlineObject = null;
OutlineProcessionContext firstOutlineObjectProcessionContext = null; OutlineProcessionContext firstOutlineObjectProcessionContext = null;
if (outlineObjectListIterator.hasNext()) { if (outlineObjectListIterator.hasNext()) {
firstOutlineObject = outlineObjectListIterator.next(); firstOutlineObject = outlineObjectListIterator.next();
firstOutlineObjectProcessionContext = new OutlineProcessionContext(firstOutlineObject); firstOutlineObjectProcessionContext = new OutlineProcessionContext(firstOutlineObject);
processTextBlocks(getTextPageBlocks(classificationPage), firstOutlineObjectProcessionContext); processTextBlocks(getTextPageBlocks(classificationPage), pageHeight, firstOutlineObjectProcessionContext);
} }
if (!contextsOverlap(notFoundOutlineObjectProcessionContext, firstOutlineObjectProcessionContext)) { if (!contextsOverlap(notFoundOutlineObjectProcessionContext, firstOutlineObjectProcessionContext)) {
notFoundOutlineObject.setFound(selectMatch(classificationPage, notFoundOutlineObjectProcessionContext)); notFoundOutlineObject.setFound(selectMatch(classificationPage, notFoundOutlineObjectProcessionContext, pageHeight));
} }
if (firstOutlineObject != null) { if (firstOutlineObject != null) {
// re-create the context for the updated blocks // re-create the context for the updated blocks
firstOutlineObjectProcessionContext = new OutlineProcessionContext(firstOutlineObject); firstOutlineObjectProcessionContext = new OutlineProcessionContext(firstOutlineObject);
processTextBlocks(getTextPageBlocks(classificationPage), firstOutlineObjectProcessionContext); processTextBlocks(getTextPageBlocks(classificationPage), pageHeight, firstOutlineObjectProcessionContext);
firstOutlineObject.setFound(selectMatch(classificationPage, firstOutlineObjectProcessionContext)); firstOutlineObject.setFound(selectMatch(classificationPage, firstOutlineObjectProcessionContext, pageHeight));
} }
} }
outlineObjectListIterator.forEachRemaining(outlineObject -> { outlineObjectListIterator.forEachRemaining(outlineObject -> {
OutlineProcessionContext outlineObjectProcessionContext = new OutlineProcessionContext(outlineObject); OutlineProcessionContext outlineObjectProcessionContext = new OutlineProcessionContext(outlineObject);
processTextBlocks(getTextPageBlocks(classificationPage), outlineObjectProcessionContext); processTextBlocks(getTextPageBlocks(classificationPage), pageHeight, outlineObjectProcessionContext);
outlineObject.setFound(selectMatch(classificationPage, outlineObjectProcessionContext)); outlineObject.setFound(selectMatch(classificationPage, outlineObjectProcessionContext, pageHeight));
}); });
if (!outlineObjects.isEmpty()) { if (!outlineObjects.isEmpty()) {
@ -123,7 +104,8 @@ public class BlockificationPostprocessingService {
double maxYFirst = blocksOfFirstOutline.stream() double maxYFirst = blocksOfFirstOutline.stream()
.mapToDouble(TextPageBlock::getPdfMaxY) .mapToDouble(TextPageBlock::getPdfMaxY)
.max().orElse(Double.NEGATIVE_INFINITY); .max()
.orElse(Double.NEGATIVE_INFINITY);
return blocksOfNotFoundOutline.stream() return blocksOfNotFoundOutline.stream()
.mapToDouble(TextPageBlock::getPdfMaxY) .mapToDouble(TextPageBlock::getPdfMaxY)
@ -145,13 +127,13 @@ public class BlockificationPostprocessingService {
} }
private void processTextBlocks(List<TextPageBlock> textBlocks, OutlineProcessionContext context) { private void processTextBlocks(List<TextPageBlock> textBlocks, float pageHeight, OutlineProcessionContext context) {
OutlineObject outlineObject = context.getOutlineObject(); OutlineObject outlineObject = context.getOutlineObject();
ListIterator<TextPageBlock> iterator = textBlocks.listIterator(); ListIterator<TextPageBlock> iterator = textBlocks.listIterator();
while (iterator.hasNext()) { while (iterator.hasNext()) {
TextPageBlock pageBlock = iterator.next(); TextPageBlock pageBlock = iterator.next();
if (outlineObject.isAbove(pageBlock)) { if (pageHeight - outlineObject.getPoint().getY() - BLOCK_TO_OUTLINE_DISTANCE_THRESHOLD <= pageBlock.getMaxY()) {
break; break;
} }
} }
@ -166,7 +148,7 @@ public class BlockificationPostprocessingService {
} }
private boolean selectMatch(ClassificationPage classificationPage, OutlineProcessionContext context) { private boolean selectMatch(ClassificationPage classificationPage, OutlineProcessionContext context, float pageHeight) {
OutlineObject outlineObject = context.outlineObject; OutlineObject outlineObject = context.outlineObject;
TextPageBlock directMatch = context.directMatch; TextPageBlock directMatch = context.directMatch;
@ -174,8 +156,8 @@ public class BlockificationPostprocessingService {
TextPageBlock splitCandidate = context.splitCandidate; TextPageBlock splitCandidate = context.splitCandidate;
PageBlockType headlineType = PageBlockType.getHeadlineType(outlineObject.getTreeDepth()); PageBlockType headlineType = PageBlockType.getHeadlineType(outlineObject.getTreeDepth());
double distanceToDirectMatch = directMatch != null ? calculateDistance(outlineObject, directMatch) : Double.MAX_VALUE; double distanceToDirectMatch = directMatch != null ? calculateDistance(outlineObject, directMatch, pageHeight) : Double.MAX_VALUE;
double distanceToSplitCandidate = splitCandidate != null ? calculateDistance(outlineObject, splitCandidate) : Double.MAX_VALUE; double distanceToSplitCandidate = splitCandidate != null ? calculateDistance(outlineObject, splitCandidate, pageHeight) : Double.MAX_VALUE;
double distanceToBestMergeCandidates = Double.MAX_VALUE; double distanceToBestMergeCandidates = Double.MAX_VALUE;
List<TextPageBlock> bestMergeCandidateCombination = new ArrayList<>(); List<TextPageBlock> bestMergeCandidateCombination = new ArrayList<>();
@ -195,8 +177,9 @@ public class BlockificationPostprocessingService {
for (List<TextPageBlock> combination : combinations) { for (List<TextPageBlock> combination : combinations) {
double averageDistance = combination.stream() double averageDistance = combination.stream()
.map(block -> calculateDistance(outlineObject, block)) .map(block -> calculateDistance(outlineObject, block, pageHeight))
.mapToDouble(Double::doubleValue).average().orElse(Double.MAX_VALUE); .mapToDouble(Double::doubleValue).average()
.orElse(Double.MAX_VALUE);
if (distanceToBestMergeCandidates > averageDistance) { if (distanceToBestMergeCandidates > averageDistance) {
distanceToBestMergeCandidates = averageDistance; distanceToBestMergeCandidates = averageDistance;
bestMergeCandidateCombination = combination; bestMergeCandidateCombination = combination;
@ -242,14 +225,14 @@ public class BlockificationPostprocessingService {
headline = sectionIdentifier + headline; headline = sectionIdentifier + headline;
} }
WordSequenceResult wordSequenceResult = findWordSequence(blockToSplit.getWords(), headline); WordSequenceResult wordSequenceResult = findWordSequence(blockToSplit.getSequences(), headline);
if (wordSequenceResult.inSequence.isEmpty() && !headline.equals(title)) { if (wordSequenceResult.inSequence.isEmpty() && !headline.equals(title)) {
wordSequenceResult = findWordSequence(blockToSplit.getWords(), title); wordSequenceResult = findWordSequence(blockToSplit.getSequences(), title);
} }
boolean modifiedBlockToSplit = false; boolean modifiedBlockToSplit = false;
if (!wordSequenceResult.inSequence.isEmpty()) { if (!wordSequenceResult.inSequence.isEmpty()) {
blockToSplit.setWords(wordSequenceResult.inSequence); blockToSplit.setSequences(wordSequenceResult.inSequence);
blockToSplit.recalculateBBox(); blockToSplit.recalculateBBox();
modifiedBlockToSplit = true; modifiedBlockToSplit = true;
} }
@ -270,19 +253,19 @@ public class BlockificationPostprocessingService {
} }
private static WordSequenceResult findWordSequence(List<Word> words, String text) { private static WordSequenceResult findWordSequence(List<TextPositionSequence> textPositionSequences, String text) {
String target = sanitizeString(text); String target = sanitizeString(text);
List<Word> inSequence = new ArrayList<>(); List<TextPositionSequence> inSequence = new ArrayList<>();
List<Word> preSequence = new ArrayList<>(); List<TextPositionSequence> preSequence = new ArrayList<>();
List<Word> postSequence = new ArrayList<>(); List<TextPositionSequence> postSequence = new ArrayList<>();
StringBuilder currentSequence = new StringBuilder(); StringBuilder currentSequence = new StringBuilder();
if (target.isBlank()) { if (target.isBlank()) {
return new WordSequenceResult(); return new WordSequenceResult();
} }
for (Word sequence : words) { for (TextPositionSequence sequence : textPositionSequences) {
currentSequence.append(sanitizeString(sequence.toString())); currentSequence.append(sanitizeString(sequence.toString()));
inSequence.add(sequence); inSequence.add(sequence);
@ -294,10 +277,10 @@ public class BlockificationPostprocessingService {
int index = 0; int index = 0;
String toRemove = currentSequence.substring(0, currentSequence.length() - target.length()); String toRemove = currentSequence.substring(0, currentSequence.length() - target.length());
Word next = inSequence.get(index); TextPositionSequence next = inSequence.get(index);
while (currentSequence.length() - next.length() >= target.length()) { while (currentSequence.length() - next.length() >= target.length()) {
Word removed = inSequence.remove(index); TextPositionSequence removed = inSequence.remove(index);
currentSequence.delete(0, removed.toString().length()); currentSequence.delete(0, removed.toString().length());
preSequence.add(removed); preSequence.add(removed);
@ -326,7 +309,7 @@ public class BlockificationPostprocessingService {
} }
if (currentSequence.toString().equals(target)) { if (currentSequence.toString().equals(target)) {
postSequence.addAll(words.subList(words.indexOf(sequence) + 1, words.size())); postSequence.addAll(textPositionSequences.subList(textPositionSequences.indexOf(sequence) + 1, textPositionSequences.size()));
return new WordSequenceResult(inSequence, preSequence, postSequence); return new WordSequenceResult(inSequence, preSequence, postSequence);
} }
} }
@ -336,10 +319,10 @@ public class BlockificationPostprocessingService {
} }
private static SplitSequenceResult splitSequence(Word sequence, String toRemove) { private static SplitSequenceResult splitSequence(TextPositionSequence sequence, String toRemove) {
Word in = null; TextPositionSequence in = null;
Word out; TextPositionSequence out;
String currentSequence = sequence.toString().toLowerCase(Locale.ROOT); String currentSequence = sequence.toString().toLowerCase(Locale.ROOT);
int index = currentSequence.indexOf(toRemove); int index = currentSequence.indexOf(toRemove);
@ -349,17 +332,17 @@ public class BlockificationPostprocessingService {
if (index > 0) { if (index > 0) {
in = createSubSequence(sequence, 0, index); in = createSubSequence(sequence, 0, index);
} else if (endIndex < sequence.length()) { } else if (endIndex < sequence.getTextPositions().size()) {
in = createSubSequence(sequence, endIndex, sequence.length()); in = createSubSequence(sequence, endIndex, sequence.getTextPositions().size());
} }
return new SplitSequenceResult(in, out); return new SplitSequenceResult(in, out);
} }
private static Word createSubSequence(Word sequence, int start, int end) { private static TextPositionSequence createSubSequence(TextPositionSequence sequence, int start, int end) {
Word newSeq = new Word(new ArrayList<>(sequence.getCharacters().subList(start, end)), sequence.getPage()); TextPositionSequence newSeq = new TextPositionSequence(new ArrayList<>(sequence.getTextPositions().subList(start, end)), sequence.getPage());
newSeq.setParagraphStart(sequence.isParagraphStart()); newSeq.setParagraphStart(sequence.isParagraphStart());
return newSeq; return newSeq;
} }
@ -374,10 +357,10 @@ public class BlockificationPostprocessingService {
List<TextPageBlock> mergedBlocks = new ArrayList<>(); List<TextPageBlock> mergedBlocks = new ArrayList<>();
for (TextPageBlock textPageBlock : blocksToMerge.subList(1, blocksToMerge.size())) { for (TextPageBlock textPageBlock : blocksToMerge.subList(1, blocksToMerge.size())) {
if (firstBlock != null && !firstBlock.getWords().isEmpty()) { if (firstBlock != null && !firstBlock.getSequences().isEmpty()) {
if (textPageBlock.getDir() == firstBlock.getDir()) { if (textPageBlock.getDir() == firstBlock.getDir()) {
firstBlock.addAll(textPageBlock.getWords()); firstBlock.getSequences().addAll(textPageBlock.getSequences());
mergedBlocks.add(textPageBlock); mergedBlocks.add(textPageBlock);
} }
} }
@ -423,9 +406,11 @@ public class BlockificationPostprocessingService {
} }
private double calculateDistance(OutlineObject outlineObject, TextPageBlock pageBlock) { private double calculateDistance(OutlineObject outlineObject, TextPageBlock pageBlock, float pageHeight) {
return outlineObject.distance(pageBlock); double deltaX = outlineObject.getPoint().getX() - pageBlock.getMinX();
double deltaY = pageHeight - outlineObject.getPoint().getY() - pageBlock.getMinY();
return Math.sqrt(deltaX * deltaX + deltaY * deltaY);
} }
@ -442,13 +427,6 @@ public class BlockificationPostprocessingService {
String blockText = sanitizeString(pageBlock.getText()); String blockText = sanitizeString(pageBlock.getText());
String outlineTitle = sanitizeString(outlineObject.getTitle()); String outlineTitle = sanitizeString(outlineObject.getTitle());
int threshold = (int) (Math.min(blockText.length(), outlineTitle.length()) * STRING_SIMILARITY_THRESHOLD) + 1;
int distance = new LevenshteinDistance(threshold).apply(blockText, outlineTitle);
if (distance >= 0 && distance < threshold) {
context.directMatch = pageBlock;
return true;
}
boolean blockTextContainsOutlineTitle = blockText.contains(outlineTitle); boolean blockTextContainsOutlineTitle = blockText.contains(outlineTitle);
boolean outlineTitleContainsBlockText = outlineTitle.contains(blockText); boolean outlineTitleContainsBlockText = outlineTitle.contains(blockText);
@ -487,9 +465,7 @@ public class BlockificationPostprocessingService {
private static String sanitizeString(String text) { private static String sanitizeString(String text) {
return TextNormalizationUtilities.removeAllWhitespaces(text)// return StringUtils.deleteWhitespace(text).toLowerCase(Locale.ROOT);
.trim() // sometimes there are trailing empty bytes at the end of the string trim() seems to remove them
.toLowerCase(Locale.ENGLISH);
} }
@ -516,12 +492,12 @@ public class BlockificationPostprocessingService {
public static class WordSequenceResult { public static class WordSequenceResult {
public List<Word> inSequence; public List<TextPositionSequence> inSequence;
public List<Word> preSequence; public List<TextPositionSequence> preSequence;
public List<Word> postSequence; public List<TextPositionSequence> postSequence;
public WordSequenceResult(List<Word> inSequence, List<Word> preSequence, List<Word> postSequence) { public WordSequenceResult(List<TextPositionSequence> inSequence, List<TextPositionSequence> preSequence, List<TextPositionSequence> postSequence) {
this.inSequence = inSequence; this.inSequence = inSequence;
this.preSequence = preSequence; this.preSequence = preSequence;
@ -542,7 +518,7 @@ public class BlockificationPostprocessingService {
} }
public record SplitSequenceResult(Word in, Word out) { public record SplitSequenceResult(TextPositionSequence in, TextPositionSequence out) {
} }
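The reworked calculateDistance measures the Euclidean distance between an outline destination point and a block's top-left corner, flipping the outline Y against pageHeight first; reading the diff, this suggests outline points arrive in PDF user space (origin bottom-left) while blocks are measured top-down, though that is an interpretation of the code rather than a documented contract. A minimal sketch of the same computation (Math.hypot is equivalent to the sqrt form used above):

class OutlineDistanceSketch {

    // Illustrative numbers: on a 792 pt page, an outline destination at (72, 700)
    // and a block whose top-left is (70, 90) end up about 2.8 pt apart.
    static double distance(double outlineX, double outlineY, double blockMinX, double blockMinY, double pageHeight) {
        double deltaX = outlineX - blockMinX;
        double deltaY = pageHeight - outlineY - blockMinY; // flip the PDF Y into top-down space
        return Math.hypot(deltaX, deltaY);
    }
}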

View File

@ -14,7 +14,7 @@ import com.knecon.fforesight.service.layoutparser.processor.model.Classification
import com.knecon.fforesight.service.layoutparser.processor.model.table.CleanRulings; import com.knecon.fforesight.service.layoutparser.processor.model.table.CleanRulings;
import com.knecon.fforesight.service.layoutparser.processor.model.table.TablePageBlock; import com.knecon.fforesight.service.layoutparser.processor.model.table.TablePageBlock;
import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPageBlock; import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPageBlock;
import com.knecon.fforesight.service.layoutparser.processor.model.text.Word; import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPositionSequence;
import com.knecon.fforesight.service.layoutparser.processor.visualization.LayoutDebugLayer; import com.knecon.fforesight.service.layoutparser.processor.visualization.LayoutDebugLayer;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
@ -30,7 +30,7 @@ public class DocstrumBlockificationService {
static final float THRESHOLD = 1f; static final float THRESHOLD = 1f;
public ClassificationPage blockify(List<Word> textPositions, public ClassificationPage blockify(List<TextPositionSequence> textPositions,
CleanRulings rulings, CleanRulings rulings,
boolean xyOrder, boolean xyOrder,
LayoutDebugLayer visualizations, LayoutDebugLayer visualizations,
@ -72,16 +72,16 @@ public class DocstrumBlockificationService {
List<AbstractPageBlock> abstractPageBlocks = new ArrayList<>(); List<AbstractPageBlock> abstractPageBlocks = new ArrayList<>();
zones.forEach(zone -> { zones.forEach(zone -> {
List<Word> words = new ArrayList<>(); List<TextPositionSequence> textPositionSequences = new ArrayList<>();
zone.getLines() zone.getLines()
.forEach(line -> { .forEach(line -> {
line.getWords() line.getWords()
.forEach(word -> { .forEach(word -> {
words.add(new Word(word.getCharacters(), word.getPage())); textPositionSequences.add(new TextPositionSequence(word.getTextPositions(), word.getPage()));
}); });
}); });
abstractPageBlocks.add(buildTextBlock(words, 0)); abstractPageBlocks.add(buildTextBlock(textPositionSequences, 0));
}); });
return abstractPageBlocks; return abstractPageBlocks;
@ -102,7 +102,7 @@ public class DocstrumBlockificationService {
} }
TextPageBlock current = (TextPageBlock) block; TextPageBlock current = (TextPageBlock) block;
if (previous != null && !previous.getWords().isEmpty()) { if (previous != null && !previous.getSequences().isEmpty()) {
if (current.getDir() != previous.getDir() || usedRulings.lineBetween(current, previous)) { if (current.getDir() != previous.getDir() || usedRulings.lineBetween(current, previous)) {
previous = current; previous = current;
@ -182,8 +182,8 @@ public class DocstrumBlockificationService {
private TextPageBlock combineBlocksAndResetIterator(TextPageBlock previous, TextPageBlock current, ListIterator<AbstractPageBlock> itty, boolean toDuplicate) { private TextPageBlock combineBlocksAndResetIterator(TextPageBlock previous, TextPageBlock current, ListIterator<AbstractPageBlock> itty, boolean toDuplicate) {
previous.addAll(current.getWords()); previous.getSequences().addAll(current.getSequences());
previous = buildTextBlock(previous.getWords(), 0); previous = buildTextBlock(previous.getSequences(), 0);
previous.setToDuplicate(toDuplicate); previous.setToDuplicate(toDuplicate);
if (current.getClassification() != null && previous.getClassification() == null) { if (current.getClassification() != null && previous.getClassification() == null) {
previous.setClassification(current.getClassification()); previous.setClassification(current.getClassification());
@ -283,8 +283,8 @@ public class DocstrumBlockificationService {
if (current.getDir() == inner.getDir() && current.intersects(inner, yThreshold, xThreshold)) { if (current.getDir() == inner.getDir() && current.intersects(inner, yThreshold, xThreshold)) {
boolean toDuplicate = current.isToDuplicate() || inner.isToDuplicate(); boolean toDuplicate = current.isToDuplicate() || inner.isToDuplicate();
current.addAll(inner.getWords()); current.getSequences().addAll(inner.getSequences());
current = buildTextBlock(current.getWords(), 0); current = buildTextBlock(current.getSequences(), 0);
current.setToDuplicate(toDuplicate); current.setToDuplicate(toDuplicate);
blocks.set(i, null); blocks.set(i, null);
@ -301,7 +301,7 @@ public class DocstrumBlockificationService {
} }
public static TextPageBlock buildTextBlock(List<Word> wordBlockList, int indexOnPage) { public static TextPageBlock buildTextBlock(List<TextPositionSequence> wordBlockList, int indexOnPage) {
return new TextPageBlock(wordBlockList); return new TextPageBlock(wordBlockList);
} }

DocuMineBlockificationService.java

@ -2,28 +2,23 @@ package com.knecon.fforesight.service.layoutparser.processor.services.blockifica
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.ListIterator;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import com.iqser.red.service.redaction.v1.server.model.document.nodes.LayoutEngine;
import com.knecon.fforesight.service.layoutparser.processor.model.AbstractPageBlock; import com.knecon.fforesight.service.layoutparser.processor.model.AbstractPageBlock;
import com.knecon.fforesight.service.layoutparser.processor.model.ClassificationPage; import com.knecon.fforesight.service.layoutparser.processor.model.ClassificationPage;
import com.knecon.fforesight.service.layoutparser.processor.model.Orientation; import com.knecon.fforesight.service.layoutparser.processor.model.Orientation;
import com.knecon.fforesight.service.layoutparser.processor.model.table.CleanRulings; import com.knecon.fforesight.service.layoutparser.processor.model.table.CleanRulings;
import com.knecon.fforesight.service.layoutparser.processor.model.table.TablePageBlock;
import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPageBlock; import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPageBlock;
import com.knecon.fforesight.service.layoutparser.processor.model.text.Word; import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPositionSequence;
@SuppressWarnings("all")
@Service @Service
public class DocuMineBlockificationService { public class DocuMineBlockificationService {
static final float THRESHOLD = 1f; static final float THRESHOLD = 1f;
public static final double FONT_SIZE_CHANGE_RATIO = 0.15;
Pattern pattern = Pattern.compile("^(\\d{1,2}\\.){1,3}\\d{1,2}\\.?\\s[0-9A-Za-z ()-]{2,50}", Pattern.CASE_INSENSITIVE); Pattern pattern = Pattern.compile("^(\\d{1,2}\\.){1,3}\\d{1,2}\\.?\\s[0-9A-Za-z ()-]{2,50}", Pattern.CASE_INSENSITIVE);
@ -37,9 +32,9 @@ public class DocuMineBlockificationService {
* @param cleanRulings All rulings on a page * @param cleanRulings All rulings on a page
* @return Page object that contains the Textblock and text statistics. * @return Page object that contains the Textblock and text statistics.
*/ */
public ClassificationPage blockify(List<Word> textPositions, CleanRulings cleanRulings) { public ClassificationPage blockify(List<TextPositionSequence> textPositions, CleanRulings cleanRulings) {
List<Word> chunkWords = new ArrayList<>(); List<TextPositionSequence> chunkWords = new ArrayList<>();
List<AbstractPageBlock> textPageBlocks = new ArrayList<>(); List<AbstractPageBlock> textPageBlocks = new ArrayList<>();
CleanRulings usedRulings = cleanRulings.withoutTextRulings(); CleanRulings usedRulings = cleanRulings.withoutTextRulings();
@ -48,11 +43,11 @@ public class DocuMineBlockificationService {
double maxX = 0; double maxX = 0;
double minY = 1000; double minY = 1000;
double maxY = 0; double maxY = 0;
Word prev = null; TextPositionSequence prev = null;
boolean wasSplitted = false; boolean wasSplitted = false;
Double splitX1 = null; Double splitX1 = null;
for (Word word : textPositions) { for (TextPositionSequence word : textPositions) {
boolean lineSeparation = prev != null && word.getYDirAdj() - prev.getMaxYDirAdj() > Math.min(word.getHeight(), prev.getHeight()) * 1.1; boolean lineSeparation = prev != null && word.getYDirAdj() - prev.getMaxYDirAdj() > Math.min(word.getHeight(), prev.getHeight()) * 1.1;
boolean startFromTop = prev != null && word.getYDirAdj() < prev.getYDirAdj() - prev.getTextHeight(); boolean startFromTop = prev != null && word.getYDirAdj() < prev.getYDirAdj() - prev.getTextHeight();
@ -61,7 +56,9 @@ public class DocuMineBlockificationService {
boolean newLineAfterSplit = prev != null && word.getYDirAdj() != prev.getYDirAdj() && wasSplitted && splitX1 != word.getXDirAdj(); boolean newLineAfterSplit = prev != null && word.getYDirAdj() != prev.getYDirAdj() && wasSplitted && splitX1 != word.getXDirAdj();
boolean isSplitByRuling = prev != null && usedRulings.lineBetween(prev, word); boolean isSplitByRuling = prev != null && usedRulings.lineBetween(prev, word);
boolean splitByDir = prev != null && !prev.getDir().equals(word.getDir()); boolean splitByDir = prev != null && !prev.getDir().equals(word.getDir());
boolean splitByOtherFontAndOtherY = prev != null && Math.abs(prev.getMaxYDirAdj() - word.getMaxYDirAdj()) > word.getTextHeight() * 0.2 && isFontChange(word, prev); boolean splitByOtherFontAndOtherY = prev != null && Math.abs(prev.getMaxYDirAdj() - word.getMaxYDirAdj()) > word.getTextHeight() * 0.2 //
&& (word.getFontStyle().contains("bold") && !prev.getFontStyle().contains("bold") //
|| prev.getFontStyle().contains("bold") && !word.getFontStyle().contains("bold"));
Matcher matcher = pattern.matcher(chunkWords.stream() Matcher matcher = pattern.matcher(chunkWords.stream()
.collect(Collectors.joining(" ")).toString()); .collect(Collectors.joining(" ")).toString());
@ -123,86 +120,5 @@ public class DocuMineBlockificationService {
return new ClassificationPage(textPageBlocks); return new ClassificationPage(textPageBlocks);
} }
private static boolean isFontChange(Word word, Word prev) {
return word.getFontStyle().contains("bold") && !prev.getFontStyle().contains("bold")
|| prev.getFontStyle().contains("bold") && !word.getFontStyle().contains("bold")
|| Math.abs(prev.getFontSize() - word.getFontSize()) >= FONT_SIZE_CHANGE_RATIO * Math.min(prev.getFontSize(), word.getFontSize())
|| Math.abs(word.getTextHeight() - prev.getTextHeight()) >= FONT_SIZE_CHANGE_RATIO * Math.min(prev.getTextHeight(), word.getTextHeight());
}
public void mergeblocks(ClassificationPage page, CleanRulings usedRulings, float xThreshold, float yThreshold) {
var blocks = page.getTextBlocks();
ListIterator<AbstractPageBlock> itty = blocks.listIterator();
while (itty.hasNext()) {
AbstractPageBlock block = itty.next();
if (block == null) {
continue;
}
if (block instanceof TablePageBlock) {
continue;
}
TextPageBlock current = (TextPageBlock) block;
for (int i = 0; i < blocks.size(); i++) {
AbstractPageBlock abstractPageBlock = blocks.get(i);
if (abstractPageBlock == null) {
continue;
}
if (abstractPageBlock == current) {
continue;
}
if (abstractPageBlock instanceof TablePageBlock) {
continue;
}
if (isHeadlineFromOutline(current) || isHeadlineFromOutline(abstractPageBlock)) {
continue;
}
TextPageBlock inner = (TextPageBlock) abstractPageBlock;
if (usedRulings.lineBetween(current, blocks.get(i))) {
continue;
}
if (current.getDir() == inner.getDir() && current.intersects(inner, yThreshold, xThreshold) && (current.getClassification() == null || current.getClassification()
.equals(inner.getClassification()))) {
boolean toDuplicate = current.isToDuplicate() || inner.isToDuplicate();
current.addAll(inner.getWords());
current = buildTextBlock(current.getWords(), 0);
current.setClassification(inner.getClassification());
current.setToDuplicate(toDuplicate);
blocks.set(i, null);
itty.set(current);
}
}
}
var blocksIterator = blocks.iterator();
while (blocksIterator.hasNext()) {
if (blocksIterator.next() == null) {
blocksIterator.remove();
}
}
}
private boolean isHeadlineFromOutline(AbstractPageBlock abstractPageBlock) {
return abstractPageBlock.getEngines().contains(LayoutEngine.OUTLINE) && abstractPageBlock.getClassification() != null && abstractPageBlock.getClassification().isHeadline();
}
public static TextPageBlock buildTextBlock(List<Word> wordBlockList, int indexOnPage) {
return new TextPageBlock(wordBlockList);
}
} }
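
The blockify(...) loops in this file decide where to start a new text block by OR-ing several geometric and font-based criteria over consecutive words (vertical line gaps, rulings between words, writing-direction changes, bold/size changes, numbering patterns). The following is a heavily simplified, self-contained sketch of three of those criteria under stated assumptions: WordInfo and startsNewBlock are hypothetical stand-ins for Word/TextPositionSequence, FONT_SIZE_CHANGE_RATIO is taken from the left-hand side of this hunk, and the ruling, direction and regex checks are omitted.

// Hypothetical, self-contained sketch of the kind of split criteria used in blockify(...) above.
public class BlockSplitSketch {

    record WordInfo(double yDirAdj, double maxYDirAdj, double height, double textHeight,
                    String fontStyle, double fontSize) { }

    static final double FONT_SIZE_CHANGE_RATIO = 0.15; // from the left-hand side of this hunk

    static boolean startsNewBlock(WordInfo prev, WordInfo word) {
        if (prev == null) {
            return false;
        }
        // New line with a vertical gap larger than ~1.1x the smaller line height.
        boolean lineSeparation = word.yDirAdj() - prev.maxYDirAdj() > Math.min(word.height(), prev.height()) * 1.1;
        // Bold/regular style flip combined with a noticeable vertical offset.
        boolean boldChanged = word.fontStyle().contains("bold") != prev.fontStyle().contains("bold");
        boolean fontAndLineChanged = Math.abs(prev.maxYDirAdj() - word.maxYDirAdj()) > word.textHeight() * 0.2 && boldChanged;
        // Font size jump of at least 15% relative to the smaller of the two sizes.
        boolean fontSizeJump = Math.abs(prev.fontSize() - word.fontSize())
                >= FONT_SIZE_CHANGE_RATIO * Math.min(prev.fontSize(), word.fontSize());
        return lineSeparation || fontAndLineChanged || fontSizeJump;
    }
}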

RedactManagerBlockificationService.java

@ -11,7 +11,7 @@ import com.knecon.fforesight.service.layoutparser.processor.model.Classification
import com.knecon.fforesight.service.layoutparser.processor.model.Orientation; import com.knecon.fforesight.service.layoutparser.processor.model.Orientation;
import com.knecon.fforesight.service.layoutparser.processor.model.table.CleanRulings; import com.knecon.fforesight.service.layoutparser.processor.model.table.CleanRulings;
import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPageBlock; import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPageBlock;
import com.knecon.fforesight.service.layoutparser.processor.model.text.Word; import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPositionSequence;
import com.knecon.fforesight.service.layoutparser.processor.visualization.LayoutDebugLayer; import com.knecon.fforesight.service.layoutparser.processor.visualization.LayoutDebugLayer;
@SuppressWarnings("all") @SuppressWarnings("all")
@ -30,20 +30,20 @@ public class RedactManagerBlockificationService {
* @param visualizations * @param visualizations
* @return Page object that contains the Textblock and text statistics. * @return Page object that contains the Textblock and text statistics.
*/ */
public ClassificationPage blockify(List<Word> textPositions, CleanRulings cleanRulings, LayoutDebugLayer visualizations) { public ClassificationPage blockify(List<TextPositionSequence> textPositions, CleanRulings cleanRulings, LayoutDebugLayer visualizations) {
CleanRulings usedRulings = cleanRulings.withoutTextRulings(); CleanRulings usedRulings = cleanRulings.withoutTextRulings();
int indexOnPage = 0; int indexOnPage = 0;
List<Word> chunkWords = new ArrayList<>(); List<TextPositionSequence> chunkWords = new ArrayList<>();
List<AbstractPageBlock> chunkBlockList = new ArrayList<>(); List<AbstractPageBlock> chunkBlockList = new ArrayList<>();
double minX = 1000, maxX = 0, minY = 1000, maxY = 0; double minX = 1000, maxX = 0, minY = 1000, maxY = 0;
Word prev = null; TextPositionSequence prev = null;
boolean wasSplitted = false; boolean wasSplitted = false;
Double splitX1 = null; Double splitX1 = null;
for (Word word : textPositions) { for (TextPositionSequence word : textPositions) {
boolean lineSeparation = word.getYDirAdj() - maxY > word.getHeight() * 1.25; boolean lineSeparation = word.getYDirAdj() - maxY > word.getHeight() * 1.25;
boolean startFromTop = prev != null && word.getYDirAdj() < prev.getYDirAdj() - prev.getTextHeight(); boolean startFromTop = prev != null && word.getYDirAdj() < prev.getYDirAdj() - prev.getTextHeight();
@ -161,6 +161,7 @@ public class RedactManagerBlockificationService {
} }
if (!textPositions.isEmpty()) { if (!textPositions.isEmpty()) {
visualizations.addTextBlockVisualizations(chunkBlockList.stream() visualizations.addTextBlockVisualizations(chunkBlockList.stream()
.map(tb -> (TextPageBlock) tb)
.toList(), textPositions.get(0).getPage()); .toList(), textPositions.get(0).getPage());
} }

ClarifyndClassificationService.java

@ -10,6 +10,7 @@ import com.knecon.fforesight.service.layoutparser.processor.model.Classification
import com.knecon.fforesight.service.layoutparser.processor.model.ClassificationPage; import com.knecon.fforesight.service.layoutparser.processor.model.ClassificationPage;
import com.knecon.fforesight.service.layoutparser.processor.model.PageBlockType; import com.knecon.fforesight.service.layoutparser.processor.model.PageBlockType;
import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPageBlock; import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPageBlock;
import com.knecon.fforesight.service.layoutparser.processor.utils.MarkedContentUtils;
import com.knecon.fforesight.service.layoutparser.processor.utils.PositionUtils; import com.knecon.fforesight.service.layoutparser.processor.utils.PositionUtils;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
@ -22,7 +23,7 @@ public class ClarifyndClassificationService {
public void classifyDocument(ClassificationDocument document) { public void classifyDocument(ClassificationDocument document) {
List<Double> headlineFontSizes = document.getFontSizeCounter().getValuesInReverseOrder(); List<Double> headlineFontSizes = document.getFontSizeCounter().getHigherThanMostPopular();
log.debug("Document FontSize counters are: {}", document.getFontSizeCounter().getCountPerValue()); log.debug("Document FontSize counters are: {}", document.getFontSizeCounter().getCountPerValue());
@ -34,10 +35,7 @@ public class ClarifyndClassificationService {
} }
private void classifyPage(HeadlineClassificationService headlineClassificationService, private void classifyPage(HeadlineClassificationService headlineClassificationService, ClassificationPage page, ClassificationDocument document, List<Double> headlineFontSizes) {
ClassificationPage page,
ClassificationDocument document,
List<Double> headlineFontSizes) {
for (AbstractPageBlock textBlock : page.getTextBlocks()) { for (AbstractPageBlock textBlock : page.getTextBlocks()) {
if (textBlock instanceof TextPageBlock) { if (textBlock instanceof TextPageBlock) {
@ -47,11 +45,7 @@ public class ClarifyndClassificationService {
} }
private void classifyBlock(HeadlineClassificationService headlineClassificationService, private void classifyBlock(HeadlineClassificationService headlineClassificationService, TextPageBlock textBlock, ClassificationPage page, ClassificationDocument document, List<Double> headlineFontSizes) {
TextPageBlock textBlock,
ClassificationPage page,
ClassificationDocument document,
List<Double> headlineFontSizes) {
var bodyTextFrame = page.getBodyTextFrame(); var bodyTextFrame = page.getBodyTextFrame();
@ -59,58 +53,63 @@ public class ClarifyndClassificationService {
headlineClassificationService.setLastHeadlineFromOutline(textBlock); headlineClassificationService.setLastHeadlineFromOutline(textBlock);
return; return;
} }
if (textBlock.getClassification() != null && (textBlock.getClassification().equals(PageBlockType.HEADER)
|| textBlock.getClassification().equals(PageBlockType.FOOTER)
|| textBlock.getClassification().equals(PageBlockType.TABLE_OF_CONTENTS_ITEM))) {
return;
}
if (document.getFontSizeCounter().getMostPopular() == null) { if (document.getFontSizeCounter().getMostPopular() == null) {
textBlock.setClassification(PageBlockType.PARAGRAPH); textBlock.setClassification(PageBlockType.PARAGRAPH);
return; return;
} }
if (page.getPageNumber() == 1 // if (MarkedContentUtils.intersects(textBlock, page.getMarkedContentBboxPerType(), MarkedContentUtils.HEADER) || PositionUtils.isOverBodyTextFrame(bodyTextFrame,
&& (PositionUtils.getHeightDifferenceBetweenChunkWordAndDocumentWord(textBlock, document.getTextHeightCounter().getMostPopular()) > 2.5 textBlock,
&& textBlock.getHighestFontSize() > document.getFontSizeCounter().getMostPopular() || page.getTextBlocks().size() == 1)) { page.getRotation()) && (document.getFontSizeCounter().getMostPopular() == null || textBlock.getHighestFontSize() <= document.getFontSizeCounter()
if (!Pattern.matches("[0-9]+", textBlock.toString())) { .getMostPopular())) {
PageBlockType headlineType = HeadlineClassificationService.headlineClassByFontSize(textBlock, headlineFontSizes);
headlineClassificationService.classifyHeadline(textBlock, headlineType);
document.setHeadlines(true);
}
} else if (textBlock.getMostPopularWordFontSize() > document.getFontSizeCounter().getMostPopular()
&& PositionUtils.getApproxLineCount(textBlock) < 4.9
&& (textBlock.getMostPopularWordStyle().equals("bold")
|| !document.getFontStyleCounter().getCountPerValue().containsKey("bold")
&& textBlock.getMostPopularWordFontSize() > document.getFontSizeCounter().getMostPopular() + 1)
&& textBlock.getWords().get(0).getFontSize()>= textBlock.getMostPopularWordFontSize()) {
PageBlockType headlineType = HeadlineClassificationService.headlineClassByFontSize(textBlock, headlineFontSizes);
headlineClassificationService.classifyHeadline(textBlock, headlineType);
document.setHeadlines(true);
} else if (!textBlock.getText().startsWith("Figure ")
&& PositionUtils.isWithinBodyTextFrame(bodyTextFrame, textBlock)
&& textBlock.getMostPopularWordStyle().equals("bold")
&& !document.getFontStyleCounter().getMostPopular().equals("bold")
&& PositionUtils.getApproxLineCount(textBlock) < 2.9
&& textBlock.getWords().get(0).getFontSize() >= textBlock.getMostPopularWordFontSize()) {
PageBlockType headlineType = HeadlineClassificationService.headlineClassByFontSize(textBlock, headlineFontSizes);
headlineClassificationService.classifyHeadline(textBlock, headlineType);
document.setHeadlines(true);
} else if (PositionUtils.isWithinBodyTextFrame(bodyTextFrame, textBlock)
&& textBlock.getMostPopularWordFontSize() == document.getFontSizeCounter().getMostPopular()
&& textBlock.getMostPopularWordStyle().equals("bold")
&& !document.getFontStyleCounter().getMostPopular().equals("bold")) {
textBlock.setClassification(PageBlockType.PARAGRAPH_BOLD);
} else if (PositionUtils.isWithinBodyTextFrame(bodyTextFrame, textBlock)
&& textBlock.getMostPopularWordFont().equals(document.getFontCounter().getMostPopular())
&& textBlock.getMostPopularWordStyle().equals(document.getFontStyleCounter().getMostPopular())
&& textBlock.getMostPopularWordFontSize() == document.getFontSizeCounter().getMostPopular()) {
textBlock.setClassification(PageBlockType.PARAGRAPH); textBlock.setClassification(PageBlockType.PARAGRAPH);
} else if (PositionUtils.isWithinBodyTextFrame(bodyTextFrame, textBlock)
&& textBlock.getMostPopularWordFontSize() == document.getFontSizeCounter().getMostPopular() } else if (MarkedContentUtils.intersects(textBlock, page.getMarkedContentBboxPerType(), MarkedContentUtils.FOOTER) || PositionUtils.isUnderBodyTextFrame(bodyTextFrame,
&& textBlock.getMostPopularWordStyle().equals("italic") textBlock,
&& !document.getFontStyleCounter().getMostPopular().equals("italic") page.getRotation()) && (document.getFontSizeCounter().getMostPopular() == null || textBlock.getHighestFontSize() <= document.getFontSizeCounter()
&& PositionUtils.getApproxLineCount(textBlock) < 2.9) { .getMostPopular())) {
textBlock.setClassification(PageBlockType.PARAGRAPH);
} else if (page.getPageNumber() == 1 && (PositionUtils.getHeightDifferenceBetweenChunkWordAndDocumentWord(textBlock,
document.getTextHeightCounter().getMostPopular()) > 2.5 && textBlock.getHighestFontSize() > document.getFontSizeCounter().getMostPopular() || page.getTextBlocks()
.size() == 1)) {
if (!Pattern.matches("[0-9]+", textBlock.toString())) {
textBlock.setClassification(PageBlockType.TITLE);
}
} else if (textBlock.getMostPopularWordFontSize() > document.getFontSizeCounter()
.getMostPopular() && PositionUtils.getApproxLineCount(textBlock) < 4.9 && (textBlock.getMostPopularWordStyle().equals("bold") || !document.getFontStyleCounter()
.getCountPerValue()
.containsKey("bold") && textBlock.getMostPopularWordFontSize() > document.getFontSizeCounter().getMostPopular() + 1) && textBlock.getSequences()
.get(0)
.getTextPositions()
.get(0)
.getFontSizeInPt() >= textBlock.getMostPopularWordFontSize()) {
for (int i = 1; i <= headlineFontSizes.size(); i++) {
if (textBlock.getMostPopularWordFontSize() == headlineFontSizes.get(i - 1)) {
PageBlockType headlineType = PageBlockType.getHeadlineType(i);
headlineClassificationService.classifyHeadline(textBlock, headlineType);
document.setHeadlines(true);
}
}
} else if (!textBlock.getText().startsWith("Figure ") && PositionUtils.isWithinBodyTextFrame(bodyTextFrame, textBlock) && textBlock.getMostPopularWordStyle()
.equals("bold") && !document.getFontStyleCounter().getMostPopular().equals("bold") && PositionUtils.getApproxLineCount(textBlock) < 2.9 && textBlock.getSequences()
.get(0)
.getTextPositions()
.get(0)
.getFontSizeInPt() >= textBlock.getMostPopularWordFontSize()) {
PageBlockType headlineType = PageBlockType.getHeadlineType(headlineFontSizes.size() + 1);
headlineClassificationService.classifyHeadline(textBlock, headlineType);
document.setHeadlines(true);
} else if (PositionUtils.isWithinBodyTextFrame(bodyTextFrame, textBlock) && textBlock.getMostPopularWordFontSize() == document.getFontSizeCounter()
.getMostPopular() && textBlock.getMostPopularWordStyle().equals("bold") && !document.getFontStyleCounter().getMostPopular().equals("bold")) {
textBlock.setClassification(PageBlockType.PARAGRAPH_BOLD);
} else if (PositionUtils.isWithinBodyTextFrame(bodyTextFrame, textBlock) && textBlock.getMostPopularWordFont()
.equals(document.getFontCounter().getMostPopular()) && textBlock.getMostPopularWordStyle()
.equals(document.getFontStyleCounter().getMostPopular()) && textBlock.getMostPopularWordFontSize() == document.getFontSizeCounter().getMostPopular()) {
textBlock.setClassification(PageBlockType.PARAGRAPH);
} else if (PositionUtils.isWithinBodyTextFrame(bodyTextFrame, textBlock) && textBlock.getMostPopularWordFontSize() == document.getFontSizeCounter()
.getMostPopular() && textBlock.getMostPopularWordStyle().equals("italic") && !document.getFontStyleCounter()
.getMostPopular()
.equals("italic") && PositionUtils.getApproxLineCount(textBlock) < 2.9) {
textBlock.setClassification(PageBlockType.PARAGRAPH_ITALIC); textBlock.setClassification(PageBlockType.PARAGRAPH_ITALIC);
} else if (PositionUtils.isWithinBodyTextFrame(bodyTextFrame, textBlock)) { } else if (PositionUtils.isWithinBodyTextFrame(bodyTextFrame, textBlock)) {
textBlock.setClassification(PageBlockType.PARAGRAPH_UNKNOWN); textBlock.setClassification(PageBlockType.PARAGRAPH_UNKNOWN);

ClassificationPatterns.java (removed)

@ -1,33 +0,0 @@
package com.knecon.fforesight.service.layoutparser.processor.services.classification;
import java.util.regex.Pattern;
public class ClassificationPatterns {
public static final Pattern HEADLINE_WITH_2_IDENTIFER_PATTERN = Pattern.compile("^([1-9]\\d?\\.){1,3}\\d{1,2}\\.?\\s(?:14C)?\\s*[a-z][0-9a-z \\[\\]]{2,50}", Pattern.CASE_INSENSITIVE);
public static final Pattern HEADLINE_WITH_SINGLE_IDENTIFER_PATTERN = Pattern.compile("^([0-9]\\.)\\s[a-z][0-9a-z \\[\\]]{2,50}", Pattern.CASE_INSENSITIVE);
public static final Pattern AT_LEAST_3_CHARS_PATTERN = Pattern.compile("\\p{L}{3,}", Pattern.CASE_INSENSITIVE);
public static final Pattern HEADLINE_PATTERN_WITH_SLASHES = Pattern.compile("^(\\d{1,1}\\.){1,3}\\d{1,2}\\.?\\s[a-z]{1,2}\\/[a-z]{1,2}.*");
public static final Pattern AMOUNT_PATTERN = Pattern.compile(
"^\\s*\\d+(?:\\.\\d+)?\\s*(?:ml|ul|μl|l|ug|μg|g|kg|mg|cm|cm2|cm3|mm|mm2|mm3|km|km2|m|m2|m3|lb|oz|ppm|dpm|days|weeks|months|%|f|ppb)\\b",
Pattern.CASE_INSENSITIVE);
public static final Pattern TABLE_OR_FIGURE_HEADLINE_PATTERN = Pattern.compile(
"^\\s*(?:table|continued\\s+table|appendix|figure)\\s+(?:[xvi]+|[a-z0-9]{1,3}(?:\\.[0-9]{1,3})*(?:-[0-9]{1,3})?)\\b",
Pattern.CASE_INSENSITIVE);
public static final Pattern TABLE_MID_SENTENCE_PATTERN = Pattern.compile(
"(?:table|continued\\s+table|appendix|figure)\\s+(?:[xvi]+|[a-z0-9]{1,3}(?:\\.[0-9]{1,3})*(?:-[0-9]{1,3})?)\\b",
Pattern.CASE_INSENSITIVE);
public static final Pattern ALPHANUMERIC = Pattern.compile("[a-zA-Z0-9]");
public static final Pattern NUMERIC = Pattern.compile("[0-9]+");
}
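
The regular expressions in this (removed) class are easier to judge from concrete inputs. A hypothetical scratch snippet, not part of the diff, showing what three of them accept:

import java.util.regex.Pattern;

// Hypothetical demo of what some ClassificationPatterns match; run as a throwaway main.
public class ClassificationPatternsDemo {

    public static void main(String[] args) {
        Pattern headline2Id = Pattern.compile(
                "^([1-9]\\d?\\.){1,3}\\d{1,2}\\.?\\s(?:14C)?\\s*[a-z][0-9a-z \\[\\]]{2,50}", Pattern.CASE_INSENSITIVE);
        Pattern amount = Pattern.compile(
                "^\\s*\\d+(?:\\.\\d+)?\\s*(?:ml|ul|μl|l|ug|μg|g|kg|mg|cm|cm2|cm3|mm|mm2|mm3|km|km2|m|m2|m3|lb|oz|ppm|dpm|days|weeks|months|%|f|ppb)\\b",
                Pattern.CASE_INSENSITIVE);
        Pattern tableOrFigure = Pattern.compile(
                "^\\s*(?:table|continued\\s+table|appendix|figure)\\s+(?:[xvi]+|[a-z0-9]{1,3}(?:\\.[0-9]{1,3})*(?:-[0-9]{1,3})?)\\b",
                Pattern.CASE_INSENSITIVE);

        System.out.println(headline2Id.matcher("3.2.1 Test design").find());   // true  -> numbered headline
        System.out.println(amount.matcher("5 mg").find());                     // true  -> bare measurement
        System.out.println(tableOrFigure.matcher("Table 4.2 Summary").find()); // true  -> table caption
        System.out.println(headline2Id.matcher("Introduction").find());        // false -> no numeric identifier
    }
}

In the DocuMineClassificationService hunk further down, the amount pattern is used as a veto (!isAmount), so a block that is just a measurement such as "5 mg" is not promoted to a headline even when it is short and emphasized.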

ClassificationService.java (removed)

@ -1,62 +0,0 @@
package com.knecon.fforesight.service.layoutparser.processor.services.classification;
import java.util.Map;
import org.springframework.stereotype.Service;
import com.knecon.fforesight.service.layoutparser.internal.api.queue.LayoutParsingType;
import com.knecon.fforesight.service.layoutparser.processor.model.ClassificationDocument;
import com.knecon.fforesight.service.layoutparser.processor.model.ClassificationPage;
import com.knecon.fforesight.service.layoutparser.processor.model.PageBlockType;
import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPageBlock;
import com.knecon.fforesight.service.layoutparser.processor.services.BodyTextFrameService;
import com.knecon.fforesight.service.layoutparser.processor.services.blockification.DocuMineBlockificationService;
import lombok.AccessLevel;
import lombok.RequiredArgsConstructor;
import lombok.experimental.FieldDefaults;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
public class ClassificationService {
DocuMineBlockificationService docuMineBlockificationService;
BodyTextFrameService bodyTextFrameService;
TableOfContentsClassificationService tableOfContentsClassificationService;
RedactManagerClassificationService redactManagerClassificationService;
ClarifyndClassificationService clarifyndClassificationService;
DocuMineClassificationService docuMineClassificationService;
HeaderFooterClassificationService headerFooterClassificationService;
public void classify(ClassificationDocument document, LayoutParsingType layoutParsingType, Map<String, String> identifier) {
log.info("Calculating BodyTextFrame for {}", identifier);
bodyTextFrameService.setBodyTextFrames(document, layoutParsingType);
for (ClassificationPage page : document.getPages()) {
document.getLayoutDebugLayer().addCleanRulingVisualization(page.getCleanRulings(), page.getPageNumber());
}
log.info("Classify TextBlocks for {}", identifier);
headerFooterClassificationService.classifyHeadersAndFooters(document);
tableOfContentsClassificationService.classifyTableOfContents(document);
switch (layoutParsingType) {
case REDACT_MANAGER, REDACT_MANAGER_PARAGRAPH_DEBUG, REDACT_MANAGER_OLD, CLARIFYND_PARAGRAPH_DEBUG, REDACT_MANAGER_WITHOUT_DUPLICATE_PARAGRAPH ->
redactManagerClassificationService.classifyDocument(document);
case DOCUMINE_OLD, DOCUMINE -> docuMineClassificationService.classifyDocument(document);
case CLARIFYND -> clarifyndClassificationService.classifyDocument(document);
}
if (layoutParsingType.equals(LayoutParsingType.DOCUMINE_OLD)) {
for (ClassificationPage page : document.getPages()) {
docuMineBlockificationService.mergeblocks(page, page.getCleanRulings().withoutTextRulings(), 0, 10);
}
}
}
}

DocuMineClassificationService.java

@ -1,21 +1,9 @@
package com.knecon.fforesight.service.layoutparser.processor.services.classification; package com.knecon.fforesight.service.layoutparser.processor.services.classification;
import static com.knecon.fforesight.service.layoutparser.processor.services.classification.ClassificationPatterns.ALPHANUMERIC;
import static com.knecon.fforesight.service.layoutparser.processor.services.classification.ClassificationPatterns.AMOUNT_PATTERN;
import static com.knecon.fforesight.service.layoutparser.processor.services.classification.ClassificationPatterns.AT_LEAST_3_CHARS_PATTERN;
import static com.knecon.fforesight.service.layoutparser.processor.services.classification.ClassificationPatterns.HEADLINE_PATTERN_WITH_SLASHES;
import static com.knecon.fforesight.service.layoutparser.processor.services.classification.ClassificationPatterns.HEADLINE_WITH_2_IDENTIFER_PATTERN;
import static com.knecon.fforesight.service.layoutparser.processor.services.classification.ClassificationPatterns.HEADLINE_WITH_SINGLE_IDENTIFER_PATTERN;
import static com.knecon.fforesight.service.layoutparser.processor.services.classification.ClassificationPatterns.TABLE_MID_SENTENCE_PATTERN;
import static com.knecon.fforesight.service.layoutparser.processor.services.classification.ClassificationPatterns.TABLE_OR_FIGURE_HEADLINE_PATTERN;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Locale; import java.util.Locale;
import java.util.Map;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
@ -23,328 +11,142 @@ import com.knecon.fforesight.service.layoutparser.processor.model.AbstractPageBl
import com.knecon.fforesight.service.layoutparser.processor.model.ClassificationDocument; import com.knecon.fforesight.service.layoutparser.processor.model.ClassificationDocument;
import com.knecon.fforesight.service.layoutparser.processor.model.ClassificationPage; import com.knecon.fforesight.service.layoutparser.processor.model.ClassificationPage;
import com.knecon.fforesight.service.layoutparser.processor.model.PageBlockType; import com.knecon.fforesight.service.layoutparser.processor.model.PageBlockType;
import com.knecon.fforesight.service.layoutparser.processor.model.text.AbstractBlockOnPage;
import com.knecon.fforesight.service.layoutparser.processor.model.text.ListIdentifier;
import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPageBlock; import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPageBlock;
import com.knecon.fforesight.service.layoutparser.processor.utils.HeaderFooterDetection;
import com.knecon.fforesight.service.layoutparser.processor.utils.MarkedContentUtils;
import com.knecon.fforesight.service.layoutparser.processor.utils.PositionUtils; import com.knecon.fforesight.service.layoutparser.processor.utils.PositionUtils;
import lombok.AccessLevel;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.experimental.FieldDefaults;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
@Slf4j @Slf4j
@Service @Service
@RequiredArgsConstructor @RequiredArgsConstructor
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
public class DocuMineClassificationService { public class DocuMineClassificationService {
public static final int SEPARATION_THRESHOLD = 10; // if the min distance between a textblock and all its surrounding blocks, the regexes can be more lenient. private static final Pattern HEADLINE_WITH_IDENTIFER_PATTERN = Pattern.compile("^([1-9]\\d?\\.){1,3}\\d{1,2}\\.?\\s[0-9A-Za-z \\[\\]]{2,50}", Pattern.CASE_INSENSITIVE);
public static final int SURROUNDING_BLOCKS_RADIUS = 3; // number of surrounding blocks before and after the current textblock to be tested private static final Pattern AT_LEAST_3_PATTERN = Pattern.compile("\\p{L}{3,}", Pattern.CASE_INSENSITIVE);
private static final Pattern HEADLINE_PATTTERN_WITH_SLASHES = Pattern.compile("^(\\d{1,1}\\.){1,3}\\d{1,2}\\.?\\s[a-z]{1,2}\\/[a-z]{1,2}.*");
ListItemClassificationService listItemClassificationService;
public void classifyDocument(ClassificationDocument document) { public void classifyDocument(ClassificationDocument document) {
List<Double> headlineFontSizes = buildHeadlineFontSizes(document); List<Double> headlineFontSizes = document.getFontSizeCounter().getHigherThanMostPopular();
List<AbstractBlockOnPage> blocks = buildBlocksPerPage(document);
log.debug("Headline FontSizes are: {}", headlineFontSizes); log.debug("Document FontSize counters are: {}", document.getFontSizeCounter().getCountPerValue());
HeadlineClassificationService headlineClassificationService = new HeadlineClassificationService(); HeadlineClassificationService headlineClassificationService = new HeadlineClassificationService();
for (int i = 0; i < blocks.size(); i++) { for (ClassificationPage page : document.getPages()) {
AbstractBlockOnPage block = blocks.get(i); classifyPage(headlineClassificationService, page, document, headlineFontSizes);
document.getLayoutDebugLayer().addTextBlockVisualizations(block.page().getTextBlocks(), block.page().getPageNumber());
classifyBlock(headlineClassificationService, i, blocks, document, headlineFontSizes);
} }
}
private void classifyPage(HeadlineClassificationService headlineClassificationService,
ClassificationPage page,
ClassificationDocument document,
List<Double> headlineFontSizes) {
for (AbstractPageBlock textBlock : page.getTextBlocks()) {
if (textBlock instanceof TextPageBlock) {
classifyBlock(headlineClassificationService, (TextPageBlock) textBlock, page, document, headlineFontSizes);
}
}
} }
private void classifyBlock(HeadlineClassificationService headlineClassificationService, private void classifyBlock(HeadlineClassificationService headlineClassificationService,
int currentIndex, TextPageBlock textBlock,
List<AbstractBlockOnPage> allBlocks, ClassificationPage page,
ClassificationDocument document, ClassificationDocument document,
List<Double> headlineFontSizes) { List<Double> headlineFontSizes) {
TextPageBlock textBlock;
if (allBlocks.get(currentIndex).block() instanceof TextPageBlock block) {
textBlock = block;
} else {
return;
}
ClassificationPage page = allBlocks.get(currentIndex).page();
List<AbstractPageBlock> surroundingBlocks = getSurroundingBlocksOnPage(currentIndex, allBlocks);
log.debug("headlineFontSizes: {}", headlineFontSizes); log.debug("headlineFontSizes: {}", headlineFontSizes);
var bodyTextFrame = page.getBodyTextFrame(); var bodyTextFrame = page.getBodyTextFrame();
Matcher headlineWith2IdentifierMatcher = HEADLINE_WITH_2_IDENTIFER_PATTERN.matcher(textBlock.toString()); Matcher headlineWithIdentifierMatcher = HEADLINE_WITH_IDENTIFER_PATTERN.matcher(textBlock.toString());
Matcher atLeast3Matcher = AT_LEAST_3_CHARS_PATTERN.matcher(textBlock.toString()); Matcher atLeast3Matcher = AT_LEAST_3_PATTERN.matcher(textBlock.toString());
Matcher headlineWithSlashesMatcher = HEADLINE_PATTERN_WITH_SLASHES.matcher(textBlock.toString()); Matcher headlineWithSlashesMatcher = HEADLINE_PATTTERN_WITH_SLASHES.matcher(textBlock.toString());
Matcher amountMatcher = AMOUNT_PATTERN.matcher(textBlock.toString());
Matcher tableOrFigureMatcher = TABLE_OR_FIGURE_HEADLINE_PATTERN.matcher(textBlock.toString());
Matcher tableMidSentenceMatcher = TABLE_MID_SENTENCE_PATTERN.matcher(textBlock.toString());
Matcher headlineWithSingleIdentifierMatcher = HEADLINE_WITH_SINGLE_IDENTIFER_PATTERN.matcher(textBlock.toString());
boolean isAtLeast3Characters = atLeast3Matcher.reset().find();
boolean headlineWithSlashesMatches = headlineWithSlashesMatcher.reset().matches();
boolean isAmount = amountMatcher.reset().find();
int charCount = countChars(textBlock);
boolean enoughChars = charCount > textBlock.getText().length() * 0.5;
List<ListIdentifier> listIdentifiers = listItemClassificationService.findConfirmedListIdentifiers(currentIndex, allBlocks);
document.getLayoutDebugLayer().addListIdentifiers(listIdentifiers);
if (textBlock.getClassification() != null && textBlock.getClassification().isHeadline()) { if (textBlock.getClassification() != null && textBlock.getClassification().isHeadline()) {
headlineClassificationService.setLastHeadlineFromOutline(textBlock); headlineClassificationService.setLastHeadlineFromOutline(textBlock);
return; return;
} }
if (textBlock.getClassification() != null && (textBlock.getClassification().equals(PageBlockType.HEADER)
|| textBlock.getClassification().equals(PageBlockType.FOOTER)
|| textBlock.getClassification().equals(PageBlockType.TABLE_OF_CONTENTS_ITEM))) {
return;
}
if (document.getFontSizeCounter().getMostPopular() == null) { if (document.getFontSizeCounter().getMostPopular() == null) {
textBlock.setClassification(PageBlockType.PARAGRAPH); textBlock.setClassification(PageBlockType.OTHER);
return; return;
} }
if (textBlock.getText().length() > 5 if (MarkedContentUtils.intersects(textBlock, page.getMarkedContentBboxPerType(), MarkedContentUtils.HEADER) //
&& greaterOrEqualFontThanDocumentAverage(textBlock, document) || (PositionUtils.isOverBodyTextFrame(bodyTextFrame, textBlock, page.getRotation()) //
&& PositionUtils.getApproxLineCount(textBlock) < 5.9 && (document.getFontSizeCounter().getMostPopular() == null //
&& ((textBlock.getMostPopularWordStyle().contains("bold") || textBlock.isUnderlined())// || textBlock.getHighestFontSize() <= document.getFontSizeCounter().getMostPopular()))) {
&& Character.isDigit(textBlock.toString().charAt(0)) // textBlock.setClassification(PageBlockType.HEADER);
&& isAtLeast3Characters //
&& !textBlock.toString().contains(":") //
|| textBlock.toString().startsWith("APPENDIX") //
|| textBlock.toString().startsWith("FIGURE") //
|| textBlock.toString().startsWith("Continued TABLE") //
|| textBlock.toString().startsWith("TABLE"))
&& !textBlock.toString().endsWith(":")
&& isAtLeast3Characters
&& !isAmount
&& enoughChars) {
setAsHeadline(headlineClassificationService, textBlock, document, headlineFontSizes); } else if (MarkedContentUtils.intersects(textBlock, page.getMarkedContentBboxPerType(), MarkedContentUtils.FOOTER)
} else if (isAllCaps(textBlock) || (PositionUtils.isUnderBodyTextFrame(bodyTextFrame,
&& ALPHANUMERIC.matcher(Character.toString(textBlock.getText().charAt(0))).matches() textBlock,
&& hasSeparation(textBlock, surroundingBlocks) page.getRotation())
&& textBlock.getText().length() > 5 && (document.getFontSizeCounter().getMostPopular()
&& isAtLeast3Characters == null
&& !isAmount || textBlock.getHighestFontSize()
&& enoughChars <= document.getFontSizeCounter()
&& !textBlock.toString().contains(":") .getMostPopular()))
&& !textBlock.toString().endsWith(".") || HeaderFooterDetection.isLikelyFooter(textBlock, document, page)) {
&& PositionUtils.getApproxLineCount(textBlock) < 2.9) { textBlock.setClassification(PageBlockType.FOOTER);
} else if (page.getPageNumber() == 1 && (PositionUtils.getHeightDifferenceBetweenChunkWordAndDocumentWord(textBlock, document.getTextHeightCounter().getMostPopular()) > 2.5
&& textBlock.getHighestFontSize() > document.getFontSizeCounter().getMostPopular() || page.getTextBlocks().size() == 1)) {
if (!Pattern.matches("[0-9]+", textBlock.toString())) {
textBlock.setClassification(PageBlockType.TITLE);
}
} else if (textBlock.getText().length() > 5
&& (textBlock.getMostPopularWordHeight() > document.getTextHeightCounter().getMostPopular()
|| textBlock.getMostPopularWordFontSize() > document.getFontSizeCounter().getMostPopular())
&& PositionUtils.getApproxLineCount(textBlock) < 5.9
setAsHeadline(headlineClassificationService, textBlock, document, headlineFontSizes); && (textBlock.getMostPopularWordStyle().contains("bold")
} else if (headlineWith2IdentifierMatcher.reset().find() && Character.isDigit(textBlock.toString().charAt(0))
&& atLeast3Matcher.reset().find()
&& !textBlock.toString().contains(":") //
|| textBlock.toString().equals(textBlock.toString().toUpperCase(Locale.ROOT)) && atLeast3Matcher.reset().find() && !textBlock.toString().contains(":") //
|| textBlock.toString().startsWith("APPENDIX") //
|| textBlock.toString().startsWith("FIGURE") //
|| textBlock.toString().startsWith("Continued TABLE") //
|| textBlock.toString().startsWith("TABLE"))
&& !textBlock.toString().endsWith(":")
&& atLeast3Matcher.reset().find()) {
PageBlockType headlineType = PageBlockType.getHeadlineType(1);
headlineClassificationService.classifyHeadline(textBlock, headlineType);
document.setHeadlines(true);
} else if (headlineWithIdentifierMatcher.reset().find()
&& PositionUtils.getApproxLineCount(textBlock) < 2.9 && PositionUtils.getApproxLineCount(textBlock) < 2.9
&& isAtLeast3Characters && atLeast3Matcher.reset().find()
&& !headlineWithSlashesMatches && !headlineWithSlashesMatcher.reset().matches()) {
&& !isAmount) { PageBlockType headlineType = PageBlockType.getHeadlineType(2);
headlineClassificationService.classifyHeadline(textBlock, headlineType);
setAsHeadline(headlineClassificationService, textBlock, document, headlineFontSizes); document.setHeadlines(true);
} else if (hasSeparation(textBlock, surroundingBlocks)//
&& greaterOrEqualFontThanPageAverage(textBlock, page)//
&& PositionUtils.getApproxLineCount(textBlock) < 2.9//
&& (tableOrFigureMatcher.reset().find() || (headlineWithSingleIdentifierMatcher.reset().find() && listIdentifiers.isEmpty())) //
&& tableMidSentenceMatcher.reset().results()
.count() <= 1 //
&& !isAmount//
&& !headlineWithSlashesMatches) {
setAsHeadline(headlineClassificationService, textBlock, document, headlineFontSizes);
// } else if (textBlock.getMostPopularWordFont().contains("bold")
// && greaterOrEqualFontThanPageAverage(textBlock, page)
// && textBlock.getWords().size() <= 6
// && PositionUtils.getApproxLineCount(textBlock) < 2.9
// && isAtLeast3Characters
// && charCount > textBlock.getText().length() * 0.75
// && !textBlock.getText().contains(":")
// && textBlock.getWidth() < page.getBodyTextFrame().getWidth() * 0.7) {
//
// setAsHeadline(headlineClassificationService, textBlock, document, headlineFontSizes);
} else if (!listIdentifiers.isEmpty()) {
textBlock.setClassification(PageBlockType.LIST_ITEM);
} else if (PositionUtils.isWithinBodyTextFrame(bodyTextFrame, textBlock) } else if (PositionUtils.isWithinBodyTextFrame(bodyTextFrame, textBlock)
&& textBlock.getMostPopularWordFontSize() == document.getFontSizeCounter().getMostPopular() && textBlock.getMostPopularWordFontSize() == document.getFontSizeCounter().getMostPopular()
&& textBlock.getMostPopularWordStyle().equals("bold") && textBlock.getMostPopularWordStyle().equals("bold")
&& !document.getFontStyleCounter().getMostPopular().equals("bold")) { && !document.getFontStyleCounter().getMostPopular().equals("bold")) {
textBlock.setClassification(PageBlockType.PARAGRAPH_BOLD); textBlock.setClassification(PageBlockType.PARAGRAPH_BOLD);
} else if (PositionUtils.isWithinBodyTextFrame(bodyTextFrame, textBlock) } else if (PositionUtils.isWithinBodyTextFrame(bodyTextFrame, textBlock)
&& textBlock.getMostPopularWordFont().equals(document.getFontCounter().getMostPopular()) && textBlock.getMostPopularWordFont().equals(document.getFontCounter().getMostPopular())
&& textBlock.getMostPopularWordStyle().equals(document.getFontStyleCounter().getMostPopular()) && textBlock.getMostPopularWordStyle().equals(document.getFontStyleCounter().getMostPopular())
&& textBlock.getMostPopularWordFontSize() == document.getFontSizeCounter().getMostPopular()) { && textBlock.getMostPopularWordFontSize() == document.getFontSizeCounter().getMostPopular()) {
textBlock.setClassification(PageBlockType.PARAGRAPH); textBlock.setClassification(PageBlockType.PARAGRAPH);
} else if (PositionUtils.isWithinBodyTextFrame(bodyTextFrame, textBlock) } else if (PositionUtils.isWithinBodyTextFrame(bodyTextFrame, textBlock)
&& textBlock.getMostPopularWordFontSize() == document.getFontSizeCounter().getMostPopular() && textBlock.getMostPopularWordFontSize() == document.getFontSizeCounter().getMostPopular()
&& textBlock.getMostPopularWordStyle().equals("italic") && textBlock.getMostPopularWordStyle().equals("italic")
&& !document.getFontStyleCounter().getMostPopular().equals("italic") && !document.getFontStyleCounter().getMostPopular().equals("italic")
&& PositionUtils.getApproxLineCount(textBlock) < 2.9) { && PositionUtils.getApproxLineCount(textBlock) < 2.9) {
textBlock.setClassification(PageBlockType.PARAGRAPH_ITALIC); textBlock.setClassification(PageBlockType.PARAGRAPH_ITALIC);
} else if (PositionUtils.isWithinBodyTextFrame(bodyTextFrame, textBlock)) {
textBlock.setClassification(PageBlockType.PARAGRAPH_UNKNOWN);
} else { } else {
textBlock.setClassification(PageBlockType.PARAGRAPH); textBlock.setClassification(PageBlockType.OTHER);
} }
} }
}
private int countChars(TextPageBlock textBlock) {
int count = 0;
for (int i = 0; i < textBlock.getText().length(); i++) {
if (Character.isAlphabetic(textBlock.getText().charAt(i))) {
count++;
}
}
return count;
}
private static boolean greaterOrEqualFontThanPageAverage(TextPageBlock textBlock, ClassificationPage page) {
return textBlock.getMostPopularWordHeight() >= page.getTextHeightCounter().getMostPopular() //
|| textBlock.getMostPopularWordFontSize() >= page.getFontSizeCounter().getMostPopular();
}
private static boolean greaterOrEqualFontThanDocumentAverage(TextPageBlock textBlock, ClassificationDocument document) {
return textBlock.getMostPopularWordHeight() > document.getTextHeightCounter().getMostPopular() //
|| textBlock.getMostPopularWordFontSize() > document.getFontSizeCounter().getMostPopular();
}
private static boolean isAllCaps(TextPageBlock textBlock) {
return textBlock.toString().equals(textBlock.toString().toUpperCase(Locale.ROOT));
}
private boolean hasSeparation(TextPageBlock textBlock, List<AbstractPageBlock> surroundingBlocks) {
return surroundingBlocks.stream()
.allMatch(surroundingBlock -> calculateSeparation(textBlock, surroundingBlock) > Math.pow(SEPARATION_THRESHOLD, 2));
}
private double calculateMinSeparation(TextPageBlock textBlock, List<AbstractPageBlock> surroundingBlocks) {
return surroundingBlocks.stream()
.mapToDouble(surroundingBlock -> calculateSeparation(textBlock, surroundingBlock))
.min().orElse(Double.MAX_VALUE);
}
private static double calculateSeparation(TextPageBlock textBlock, AbstractPageBlock surroundingBlock) {
return Math.pow(surroundingBlock.horizontalDistance(textBlock), 2) + Math.pow(surroundingBlock.verticalDistance(textBlock), 2);
}
private static void setAsHeadline(HeadlineClassificationService headlineClassificationService,
TextPageBlock textBlock,
ClassificationDocument document,
List<Double> headlineFontSizes) {
PageBlockType headlineType = HeadlineClassificationService.headlineClassByFontSize(textBlock, headlineFontSizes);
headlineClassificationService.classifyHeadline(textBlock, headlineType);
document.setHeadlines(true);
}
private List<AbstractBlockOnPage> buildBlocksPerPage(ClassificationDocument document) {
List<AbstractBlockOnPage> blocks = new ArrayList<>();
for (ClassificationPage page : document.getPages()) {
for (AbstractPageBlock abstractPageBlock : page.getTextBlocks()) {
if (abstractPageBlock instanceof TextPageBlock textBlock) {
if (textBlock.getClassification() != null && (textBlock.getClassification().equals(PageBlockType.HEADER) //
|| textBlock.getClassification().equals(PageBlockType.FOOTER))) {
continue;
}
blocks.add(new AbstractBlockOnPage(textBlock, page));
}
}
}
return blocks;
}
private static List<Double> buildHeadlineFontSizes(ClassificationDocument document) {
if (document.getFontSizeCounter().getCountPerValue().size() <= 6) {
return document.getFontSizeCounter().getValuesInReverseOrder();
}
List<Map.Entry<Double, Integer>> sortedEntries = new ArrayList<>(document.getFontSizeCounter().getCountPerValue().entrySet());
sortedEntries.sort(Map.Entry.comparingByKey());
int totalCount = sortedEntries.stream()
.mapToInt(Map.Entry::getValue).sum();
int cumulativeCount = 0;
Iterator<Map.Entry<Double, Integer>> iterator = sortedEntries.iterator();
while (iterator.hasNext()) {
Map.Entry<Double, Integer> entry = iterator.next();
cumulativeCount += entry.getValue();
if (cumulativeCount > totalCount * 0.3) {
break; // We've filtered the bottom 30%, so stop.
}
iterator.remove();
}
if (sortedEntries.size() < 6) {
return document.getFontSizeCounter().getValuesInReverseOrder();
}
int clusterSize = Math.max(1, sortedEntries.size() / 6);
List<List<Double>> clusters = new ArrayList<>();
for (int i = 0; i < 6; i++) {
clusters.add(new ArrayList<>());
}
for (int i = 0; i < sortedEntries.size(); i++) {
int clusterIndex = Math.min(i / clusterSize, 5);
clusters.get(clusterIndex).add(sortedEntries.get(i).getKey());
}
return clusters.stream()
.map(cluster -> cluster.stream()
.mapToDouble(d -> d).average()
.orElseThrow())
.sorted(Comparator.reverseOrder())
.toList();
}
private List<AbstractPageBlock> getSurroundingBlocksOnPage(int originalIndex, List<AbstractBlockOnPage> textBlocks) {
int start = Math.max(originalIndex - SURROUNDING_BLOCKS_RADIUS, 0);
int end = Math.min(originalIndex + SURROUNDING_BLOCKS_RADIUS, textBlocks.size());
List<AbstractPageBlock> surroundingBlocks = new ArrayList<>(2 * SURROUNDING_BLOCKS_RADIUS);
for (int i = start; i < end; i++) {
if (i == originalIndex) {
continue;
}
if (textBlocks.get(i).block().getText().length() <= 1) {
continue;
}
if (!textBlocks.get(i).page().equals(textBlocks.get(originalIndex).page())) {
continue;
}
surroundingBlocks.add(textBlocks.get(i).block());
}
return surroundingBlocks;
}
}
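
buildHeadlineFontSizes(...) above derives up to six candidate headline sizes from the document's font-size histogram: sort the sizes, drop the smallest ones until roughly 30% of all counted occurrences are discarded, split what remains into six consecutive clusters, and return each cluster's average in descending order. A self-contained sketch of that procedure under the same assumptions follows; the class and method names here are hypothetical, and the early fall-backs for small histograms are reduced to a comment.

import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

// Hypothetical standalone version of the font-size clustering shown above.
// (The real method falls back to the raw value list when fewer than six sizes remain.)
public class HeadlineFontSizeSketch {

    static List<Double> headlineFontSizes(Map<Double, Integer> countPerFontSize) {
        List<Map.Entry<Double, Integer>> sorted = new ArrayList<>(new TreeMap<>(countPerFontSize).entrySet());
        int total = sorted.stream().mapToInt(Map.Entry::getValue).sum();

        // Remove the smallest font sizes until ~30% of all occurrences are gone.
        int cumulative = 0;
        Iterator<Map.Entry<Double, Integer>> it = sorted.iterator();
        while (it.hasNext()) {
            cumulative += it.next().getValue();
            if (cumulative > total * 0.3) {
                break;
            }
            it.remove();
        }

        // Split the surviving sizes into six consecutive clusters and average each.
        int clusterSize = Math.max(1, sorted.size() / 6);
        List<List<Double>> clusters = new ArrayList<>();
        for (int i = 0; i < 6; i++) {
            clusters.add(new ArrayList<>());
        }
        for (int i = 0; i < sorted.size(); i++) {
            clusters.get(Math.min(i / clusterSize, 5)).add(sorted.get(i).getKey());
        }
        return clusters.stream()
                .filter(cluster -> !cluster.isEmpty())
                .map(cluster -> cluster.stream().mapToDouble(d -> d).average().orElseThrow())
                .sorted(Comparator.reverseOrder())
                .toList();
    }
}

Clustering presumably keeps documents with many near-identical font sizes from exhausting all six headline levels on noise; the hasSeparation(...) check in the same file plays a related gatekeeping role by requiring a squared distance above SEPARATION_THRESHOLD squared to every surrounding block before the more lenient headline regexes apply.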

HeaderFooterClassificationService.java (removed)

@ -1,55 +0,0 @@
package com.knecon.fforesight.service.layoutparser.processor.services.classification;
import org.springframework.stereotype.Service;
import com.knecon.fforesight.service.layoutparser.processor.model.AbstractPageBlock;
import com.knecon.fforesight.service.layoutparser.processor.model.ClassificationDocument;
import com.knecon.fforesight.service.layoutparser.processor.model.ClassificationPage;
import com.knecon.fforesight.service.layoutparser.processor.model.PageBlockType;
import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPageBlock;
import com.knecon.fforesight.service.layoutparser.processor.utils.MarkedContentUtils;
import com.knecon.fforesight.service.layoutparser.processor.utils.PositionUtils;
import lombok.AccessLevel;
import lombok.RequiredArgsConstructor;
import lombok.experimental.FieldDefaults;
@Service
@RequiredArgsConstructor
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
public class HeaderFooterClassificationService {
public void classifyHeadersAndFooters(ClassificationDocument document) {
for (ClassificationPage page : document.getPages()) {
for (AbstractPageBlock pageBlock : page.getTextBlocks()) {
if (pageBlock instanceof TextPageBlock textBlock) {
classifyBlock(document, page, textBlock);
}
}
}
}
private static void classifyBlock(ClassificationDocument document, ClassificationPage page, TextPageBlock textBlock) {
if (MarkedContentUtils.intersects(textBlock, page.getMarkedContentBboxPerType(), MarkedContentUtils.HEADER)
|| PositionUtils.isOverBodyTextFrame(page.getBodyTextFrame(), textBlock, page.getRotation()) && smallerFontThanDocAverage(document, textBlock)) {
textBlock.setClassification(PageBlockType.HEADER);
} else if (MarkedContentUtils.intersects(textBlock, page.getMarkedContentBboxPerType(), MarkedContentUtils.FOOTER)
|| PositionUtils.isUnderBodyTextFrame(page.getBodyTextFrame(), textBlock, page.getRotation()) && smallerFontThanDocAverage(document, textBlock)) {
textBlock.setClassification(PageBlockType.FOOTER);
}
}
private static boolean smallerFontThanDocAverage(ClassificationDocument document, TextPageBlock textBlock) {
return document.getFontSizeCounter().getMostPopular() == null || textBlock.getHighestFontSize() <= document.getFontSizeCounter().getMostPopular();
}
}

HeadlineClassificationService.java

@ -2,10 +2,7 @@ package com.knecon.fforesight.service.layoutparser.processor.services.classifica
import static com.knecon.fforesight.service.layoutparser.processor.model.PageBlockType.getHeadlineNumber; import static com.knecon.fforesight.service.layoutparser.processor.model.PageBlockType.getHeadlineNumber;
import java.util.List;
import com.knecon.fforesight.service.layoutparser.processor.model.PageBlockType; import com.knecon.fforesight.service.layoutparser.processor.model.PageBlockType;
import com.knecon.fforesight.service.layoutparser.processor.model.SectionIdentifier;
import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPageBlock; import com.knecon.fforesight.service.layoutparser.processor.model.text.TextPageBlock;
import lombok.Getter; import lombok.Getter;
@ -19,7 +16,6 @@ public class HeadlineClassificationService {
PageBlockType originalClassifiedBlockType; PageBlockType originalClassifiedBlockType;
TextPageBlock lastHeadlineFromOutline; TextPageBlock lastHeadlineFromOutline;
public void setLastHeadlineFromOutline(TextPageBlock lastHeadlineFromOutline) { public void setLastHeadlineFromOutline(TextPageBlock lastHeadlineFromOutline) {
this.lastHeadlineFromOutline = lastHeadlineFromOutline; this.lastHeadlineFromOutline = lastHeadlineFromOutline;
@ -29,62 +25,28 @@ public class HeadlineClassificationService {
public void classifyHeadline(TextPageBlock textBlock, PageBlockType initialHeadlineType) { public void classifyHeadline(TextPageBlock textBlock, PageBlockType initialHeadlineType) {
TextPageBlock lastHeadline = getLastHeadline();
TextPageBlock lastHeadlineFromOutline = getLastHeadlineFromOutline();
PageBlockType originalClassifiedBlockType = getOriginalClassifiedBlockType();
PageBlockType finalHeadlineType = initialHeadlineType; PageBlockType finalHeadlineType = initialHeadlineType;
if (lastHeadline != null) { if (lastHeadline != null) {
finalHeadlineType = decideOnClassification(textBlock, initialHeadlineType); if (lastHeadline.equals(lastHeadlineFromOutline)) {
}
lastHeadline = textBlock; finalHeadlineType = PageBlockType.getHeadlineType(getHeadlineNumber(lastHeadline.getClassification()) + 1);
originalClassifiedBlockType = initialHeadlineType;
textBlock.setClassification(finalHeadlineType);
}
} else if (originalClassifiedBlockType != null && lastHeadline.getClassification() != originalClassifiedBlockType) {
private PageBlockType decideOnClassification(TextPageBlock textBlock, PageBlockType initialHeadlineType) { PageBlockType lastHeadlineType = lastHeadline.getClassification();
int difference = getHeadlineNumber(originalClassifiedBlockType) - getHeadlineNumber(lastHeadlineType);
SectionIdentifier identifier = SectionIdentifier.fromSearchText(textBlock.getText()); finalHeadlineType = PageBlockType.getHeadlineType(getHeadlineNumber(initialHeadlineType) - difference);
TextPageBlock lastHeadlineFromOutline = getLastHeadlineFromOutline();
PageBlockType originalClassifiedBlockType = getOriginalClassifiedBlockType();
if (!identifier.isEmpty()) {
return PageBlockType.getHeadlineType(identifier.level());
}
if (lastHeadline.equals(lastHeadlineFromOutline) && lastHeadline.getMostPopularWordFontSize() >= textBlock.getMostPopularWordFontSize()) {
return PageBlockType.getHeadlineType(getHeadlineNumber(lastHeadline.getClassification()) + 1);
} else if (originalClassifiedBlockType != null && lastHeadline.getClassification() != originalClassifiedBlockType) {
return adjustInitialLevelToLastHeadlineLevel(initialHeadlineType);
}
return initialHeadlineType;
}
private PageBlockType adjustInitialLevelToLastHeadlineLevel(PageBlockType initialHeadlineType) {
int difference = getHeadlineNumber(originalClassifiedBlockType) - getHeadlineNumber(lastHeadline.getClassification());
return PageBlockType.getHeadlineType(Math.max(1, getHeadlineNumber(initialHeadlineType) - difference));
}
public static PageBlockType headlineClassByFontSize(TextPageBlock textBlock, List<Double> fontSizeGroups) {
List<Double> distances = fontSizeGroups.stream()
.map(fontSize -> Math.abs(fontSize - textBlock.getMostPopularWordFontSize()))
.toList();
double min = Double.MAX_VALUE;
int argMin = -1;
for (int i = 0; i < distances.size(); i++) {
if (distances.get(i) < min) {
min = distances.get(i);
argMin = i;
} }
} }
return PageBlockType.getHeadlineType(argMin);
setOriginalClassifiedBlockType(initialHeadlineType);
textBlock.setClassification(finalHeadlineType);
setLastHeadline(textBlock);
} }
} }
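
headlineClassByFontSize(...) above maps a block to a headline level by picking the entry of the headline font-size list closest to the block's dominant font size, i.e. an arg-min over absolute differences. Below is a minimal sketch with a usage example; the class, method name and example sizes are hypothetical, and the real code passes the resulting index to PageBlockType.getHeadlineType(...).

import java.util.List;

// Hypothetical sketch of the arg-min lookup in headlineClassByFontSize(...).
public class NearestFontSizeSketch {

    static int closestIndex(double blockFontSize, List<Double> headlineFontSizes) {
        int argMin = -1;
        double min = Double.MAX_VALUE;
        for (int i = 0; i < headlineFontSizes.size(); i++) {
            double distance = Math.abs(headlineFontSizes.get(i) - blockFontSize);
            if (distance < min) {
                min = distance;
                argMin = i;
            }
        }
        return argMin; // the real code feeds this index into PageBlockType.getHeadlineType(...)
    }

    public static void main(String[] args) {
        // With clusters 24, 18, 14, 11 pt, a 13.5 pt block lands on index 2 (the 14 pt group).
        System.out.println(closestIndex(13.5, List.of(24.0, 18.0, 14.0, 11.0)));
    }
}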

Some files were not shown because too many files have changed in this diff.