Compare commits
No commits in common. "master" and "2.18.0" have entirely different histories.
@@ -1,7 +1,6 @@
 import os
 from functools import lru_cache, partial
 from itertools import chain, tee
-from typing import Iterable, Any

 from funcy import rcompose, first, compose, second, chunks, identity, rpartial
 from kn_utils.logging import logger

@@ -55,7 +54,7 @@ class Pipeline:
         join = compose(starlift(lambda prd, rpr, mdt: {"classification": prd, **mdt, "representation": rpr}), star(zip))

         # />--classify--\
-        # --extract-->--split--+->--encode---->+--join-->reformat-->filter_duplicates
+        # --extract-->--split--+->--encode---->+--join-->reformat
         # \>--identity--/

         self.pipe = rcompose(

@@ -64,7 +63,6 @@ class Pipeline:
             pairwise_apply(classify, represent, identity), # ... apply functions to the streams pairwise
             join, # ... the streams by zipping
             reformat, # ... the items
-            filter_duplicates, # ... filter out duplicate images
         )

     def __call__(self, pdf: bytes, page_range: range = None):
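As context for the `self.pipe = rcompose(...)` hunks above: funcy's `rcompose` chains stages left to right, so each listed stage consumes the stream produced by the one before it. A minimal sketch with toy stages (the lambdas are illustrative, not the project's real extract/classify/encode stages):

from funcy import rcompose

# rcompose(f, g, h)(x) == h(g(f(x))): stages run in the order listed.
pipe = rcompose(
    lambda xs: (x * 2 for x in xs),  # stage 1: transform each item lazily
    lambda xs: (x + 1 for x in xs),  # stage 2: consumes stage 1's stream
    list,                            # materialize the stream
)

print(pipe([1, 2, 3]))  # [3, 5, 7]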
@@ -74,32 +72,3 @@ class Pipeline:
             unit=" images",
             disable=not self.verbose,
         )
-
-
-def filter_duplicates(metadata: Iterable[dict[str, Any]]) -> Iterable[dict[str, Any]]:
-    """Filter out duplicate images from the `position` (image coordinates) and `page`, preferring the one with
-    `allPassed` set to True.
-    See RED-10765 (RM-241): Removed redactions reappear, for why this is necessary.
-    """
-    keep = dict()
-    for image_meta in metadata:
-        key: tuple[int, int, int, int, int] = (
-            image_meta["position"]["x1"],
-            image_meta["position"]["x2"],
-            image_meta["position"]["y1"],
-            image_meta["position"]["y2"],
-            image_meta["position"]["pageNumber"],
-        )
-        if key in keep:
-            logger.warning(
-                f"Duplicate image found: x1={key[0]}, x2={key[1]}, y1={key[2]}, y2={key[3]}, pageNumber={key[4]}"
-            )
-            if image_meta["filters"]["allPassed"]:
-                logger.warning("Setting the image with allPassed flag set to True")
-                keep[key] = image_meta
-            else:
-                logger.warning("Keeping the previous image since the current image has allPassed flag set to False")
-        else:
-            keep[key] = image_meta
-
-    yield from keep.values()
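For reference while reviewing, the removed deduplication pass as a self-contained, runnable sketch: images are keyed by their bounding-box coordinates plus page number, and a later copy wins only if its `allPassed` filter flag is set. The stdlib logger and the toy dicts below are stand-ins, not the project's kn_utils logger or real metadata.

import logging
from typing import Any, Iterable

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger(__name__)  # stand-in for kn_utils.logging.logger

def filter_duplicates(metadata: Iterable[dict[str, Any]]) -> Iterable[dict[str, Any]]:
    """Keep one image per (x1, x2, y1, y2, pageNumber), preferring allPassed=True."""
    keep: dict[tuple[int, int, int, int, int], dict[str, Any]] = {}
    for image_meta in metadata:
        pos = image_meta["position"]
        key = (pos["x1"], pos["x2"], pos["y1"], pos["y2"], pos["pageNumber"])
        if key in keep:
            logger.warning("Duplicate image found: %s", key)
            if image_meta["filters"]["allPassed"]:
                keep[key] = image_meta  # prefer the copy that passed all filters
        else:
            keep[key] = image_meta
    yield from keep.values()

# Toy input: two images at the same position on the same page.
dup_a = {"position": {"x1": 0, "x2": 10, "y1": 0, "y2": 10, "pageNumber": 1},
         "filters": {"allPassed": False}}
dup_b = {"position": {"x1": 0, "x2": 10, "y1": 0, "y2": 10, "pageNumber": 1},
         "filters": {"allPassed": True}}
print(list(filter_duplicates([dup_a, dup_b])))  # only dup_b survives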
@@ -1,5 +1,5 @@
 outs:
-- md5: 08bf8a63f04b3f19f859008556699708.dir
-  size: 7979836
-  nfiles: 7
+- md5: ab352d3b2c62ce2293cafb57c1b41b01.dir
+  size: 7469082
+  nfiles: 6
   path: data
@ -1,35 +0,0 @@
|
|||||||
from pathlib import Path
|
|
||||||
|
|
||||||
from image_prediction.config import CONFIG
|
|
||||||
from image_prediction.pipeline import load_pipeline
|
|
||||||
|
|
||||||
|
|
||||||
def test_all_duplicate_images_are_filtered():
|
|
||||||
"""See RED-10765 (RM-241): Removed redactions reappear."""
|
|
||||||
pdf_path = (
|
|
||||||
Path(__file__).parents[1]
|
|
||||||
/ "data"
|
|
||||||
/ "RED-10765"
|
|
||||||
/ "RM-241-461c90d6d6dc0416ad5f0b05feef4dfc.UNTOUCHED_shortened.pdf"
|
|
||||||
)
|
|
||||||
pdf_bytes = pdf_path.read_bytes()
|
|
||||||
|
|
||||||
pipeline = load_pipeline(verbose=True, batch_size=CONFIG.service.batch_size)
|
|
||||||
predictions = list(pipeline(pdf_bytes))
|
|
||||||
|
|
||||||
seen = set()
|
|
||||||
for prediction in predictions:
|
|
||||||
key = (
|
|
||||||
prediction["position"]["x1"],
|
|
||||||
prediction["position"]["x2"],
|
|
||||||
prediction["position"]["y1"],
|
|
||||||
prediction["position"]["y2"],
|
|
||||||
prediction["position"]["pageNumber"],
|
|
||||||
)
|
|
||||||
assert key not in seen, f"Duplicate found: {key}"
|
|
||||||
seen.add(key)
|
|
||||||
|
|
||||||
all_passed = sum(1 for prediction in predictions if prediction["filters"]["allPassed"])
|
|
||||||
assert all_passed == 1, f"Expected 1 image with allPassed flag set to True, but got {all_passed}"
|
|
||||||
|
|
||||||
assert len(predictions) == 177, f"Expected 177 images, but got {len(predictions)}"
|
|
||||||
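The deleted test's core invariant, distilled into a standalone helper that can be pointed at any prediction stream without the DVC-tracked PDF; `position_key` and `assert_unique_positions` are hypothetical names, not project API:

def position_key(prediction: dict) -> tuple[int, int, int, int, int]:
    """Identity of an image: its bounding box plus the page it appears on."""
    pos = prediction["position"]
    return (pos["x1"], pos["x2"], pos["y1"], pos["y2"], pos["pageNumber"])

def assert_unique_positions(predictions: list[dict]) -> None:
    """Fail if two predictions share a bounding box and page (the RED-10765 regression)."""
    seen: set[tuple[int, int, int, int, int]] = set()
    for prediction in predictions:
        key = position_key(prediction)
        assert key not in seen, f"Duplicate found: {key}"
        seen.add(key)

# Usage (pipeline and pdf_bytes as in the deleted test):
# assert_unique_positions(list(pipeline(pdf_bytes)))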