From a4fa73deaafb1bbffc81ea99a171d12120c304ab Mon Sep 17 00:00:00 2001
From: Julius Unverfehrt
Date: Wed, 23 Feb 2022 10:51:24 +0100
Subject: [PATCH] Pull request #14: optional debug progress bar added

Merge in RR/fb-detr from add-debug-progress-bar to master

Squashed commit of the following:

commit 3449be1b46f73a5e9ae3719ed2821a1b7faca9e4
Author: Matthias Bisping
Date:   Wed Feb 23 10:26:47 2022 +0100

    refactoring; added VERBOSE flag to config

commit e50234e205dfd7a40aaf7981da85e28048d9efba
Merge: 89703ca f6c51be
Author: Matthias Bisping
Date:   Wed Feb 23 09:45:33 2022 +0100

    Merge branch 'config_changes' into add-debug-progress-bar

commit f6c51beeaa952c18c80b7af6b7a46b9de8f521c3
Author: Matthias Bisping
Date:   Wed Feb 23 09:44:00 2022 +0100

    added env var

commit 89703caa776f0fad55757ab22568e45949b2b310
Author: Julius Unverfehrt
Date:   Wed Feb 23 08:28:52 2022 +0100

    optional debug progress bar added
---
 config.yaml          |  5 +++--
 fb_detr/predictor.py | 14 ++++++++++----
 src/serve.py         |  4 ----
 3 files changed, 13 insertions(+), 10 deletions(-)

diff --git a/config.yaml b/config.yaml
index 7569e6b..2a202b4 100644
--- a/config.yaml
+++ b/config.yaml
@@ -11,5 +11,6 @@ webserver:
   mode: $SERVER_MODE|production  # webserver mode: {development, production}
 
 service:
-  logging_level: DEBUG
-  batch_size: $BATCH_SIZE|2  # Number of images in memory simultaneously per service instance
+  logging_level: $LOGGING_LEVEL_ROOT|DEBUG  # Logging level for service logger
+  batch_size: $BATCH_SIZE|2  # Number of images in memory simultaneously
+  verbose: $VERBOSE|True  # Service prints document processing progress to stdout
diff --git a/fb_detr/predictor.py b/fb_detr/predictor.py
index a8f89eb..e083dbf 100644
--- a/fb_detr/predictor.py
+++ b/fb_detr/predictor.py
@@ -139,6 +139,15 @@ class Predictor:
         return predictions
 
     def predict_pdf(self, pdf: bytes):
+        def progress(generator):
+
+            page_count = get_page_count(pdf)
+            batch_count = int(page_count / CONFIG.service.batch_size)
+
+            yield from tqdm(
+                generator, total=batch_count, position=1, leave=True
+            ) if CONFIG.service.verbose else generator
+
         def predict_batch(batch_idx, batch):
             predictions = self.predict(batch)
             for p in predictions:
@@ -146,11 +155,8 @@ class Predictor:
 
             return predictions
 
-        page_count = get_page_count(pdf)
-        batch_count = int(page_count / CONFIG.service.batch_size)
-
         page_stream = stream_pages(pdf)
         page_batches = chunk_iterable(page_stream, CONFIG.service.batch_size)
-        predictions = list(chain(*starmap(predict_batch, tqdm(enumerate(page_batches), total=batch_count))))
+        predictions = list(chain(*starmap(predict_batch, progress(enumerate(page_batches)))))
 
         return predictions
diff --git a/src/serve.py b/src/serve.py
index c1c9deb..fa44ef9 100644
--- a/src/serve.py
+++ b/src/serve.py
@@ -1,16 +1,12 @@
 import argparse
-import json
 import logging
-from itertools import chain
 from typing import Callable
 
 from flask import Flask, request, jsonify
-from pdf2image import pdf2image
 from waitress import serve
 
 from fb_detr.config import CONFIG
 from fb_detr.utils.estimator import suppress_userwarnings, initialize_predictor
-from fb_detr.utils.stream import stream_pages, chunk_iterable
 
 
 def parse_args():
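
Note (illustrative, not part of the patch): the new verbose entry in config.yaml follows the same $NAME|default pattern as the other settings, so the flag can be overridden through the VERBOSE environment variable. How that pattern is resolved is the job of the repository's config loader, which this patch does not touch; the sketch below only illustrates the assumed convention, including the caveat that environment variables arrive as strings and need an explicit conversion before they can be used as a boolean flag. The resolve helper is hypothetical.

import os


def resolve(value):
    # Assumed convention: "$NAME|default" means "use the NAME environment
    # variable if it is set, otherwise fall back to the default".
    if isinstance(value, str) and value.startswith("$") and "|" in value:
        name, default = value[1:].split("|", 1)
        value = os.environ.get(name, default)
    # Environment variables are strings, so a flag such as verbose needs an
    # explicit conversion before it can be used in a truth test.
    if isinstance(value, str) and value.lower() in {"true", "1", "yes"}:
        return True
    if isinstance(value, str) and value.lower() in {"false", "0", "no"}:
        return False
    return value


print(resolve("$VERBOSE|True"))    # True unless VERBOSE is set to a falsy spelling
print(resolve("$BATCH_SIZE|2"))    # "2" (still a string; cast to int where needed)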
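
Note (illustrative, not part of the patch): the predictor change wraps the enumerated page batches in tqdm only when CONFIG.service.verbose is enabled. Because the conditional expression is evaluated before the delegation, "yield from tqdm(...) if ... else generator" parses as "yield from (tqdm(...) if ... else generator)", so the raw generator is delegated unchanged when verbosity is off. Below is a minimal standalone sketch of that pattern; with_progress, chunk, and predict_pages are hypothetical stand-ins for the repository's progress, chunk_iterable, and predict_pdf, and the sketch uses ceiling division for the total so a partial final batch is also counted.

import math
from itertools import chain, islice, starmap

from tqdm import tqdm


def chunk(iterable, size):
    # Hypothetical stand-in for the repository's chunk_iterable helper:
    # yield consecutive lists of at most `size` items.
    iterator = iter(iterable)
    while batch := list(islice(iterator, size)):
        yield batch


def with_progress(generator, total, verbose):
    # Same pattern as the patch: delegate to tqdm only when verbose is on,
    # otherwise pass the generator through without emitting progress output.
    yield from (tqdm(generator, total=total, position=1, leave=True) if verbose else generator)


def predict_pages(pages, batch_size=2, verbose=True):
    # Ceiling division so the progress total also covers a partial last batch.
    total = math.ceil(len(pages) / batch_size)
    batches = chunk(pages, batch_size)

    def predict_batch(batch_idx, batch):
        # Dummy per-batch "prediction"; the real service calls self.predict(batch).
        return ["page %s from batch %s" % (page, batch_idx) for page in batch]

    return list(chain(*starmap(predict_batch, with_progress(enumerate(batches), total, verbose))))


if __name__ == "__main__":
    # Five dummy pages in batches of two: the bar advances over three batches
    # and the call returns five per-page strings.
    print(predict_pages(list(range(5))))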