tyrwh committed · commit cd7bce8
Parent(s): bae5c31

Major overhaul of backend and frontend

Files changed:
- app.py +228 -223
- static/script.js +94 -63
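The overhaul replaces the old synchronous /process handler and /progress/<jobId> polling with uuid-keyed uploads, a multiprocessing pool, and session-based polling; the diffs below carry the details. A hedged sketch of how a client could exercise the new flow (the base URL is assumed, and it presumes an upload has already been performed in the same requests.Session so the server-side session holds 'id' and the filename maps; the upload route itself is not part of this diff):

# Hedged usage sketch of the new endpoints, mirroring what static/script.js does.
import time
import requests

BASE = "http://localhost:5000"  # assumed; adjust to wherever the app is served
s = requests.Session()          # keeps the Flask session cookie across calls

# 1. kick off processing (the server replies with {'status': 'processing', 'sessionId': ...})
print(s.post(f"{BASE}/process").json())

# 2. poll /progress until the job reports completion; completed responses carry
#    'results' keyed by the uuid assigned to each uploaded image
while True:
    prog = s.get(f"{BASE}/progress").json()
    print(prog.get("status"), prog.get("progress"))
    if prog.get("status") in ("completed", "error"):
        break
    time.sleep(1)

# 3. request an annotated PNG for one image at a chosen confidence threshold
if prog.get("status") == "completed" and prog.get("results"):
    some_uuid = next(iter(prog["results"]))
    png = s.post(f"{BASE}/annotate", json={"uuid": some_uuid, "confidence": 0.5})
    with open(f"{some_uuid}_annotated.png", "wb") as fh:
        fh.write(png.content)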
app.py
CHANGED
@@ -1,3 +1,5 @@
+
+
import os
import uuid
import traceback
@@ -7,7 +9,6 @@ import zipfile
import cv2
import csv
import pickle
-import json
import shutil
from ultralytics import YOLO
from ultralytics.utils import ThreadingLocked
@@ -15,7 +16,7 @@ import numpy as np
import pandas as pd
from torch import cuda
from flask import Flask, Response, render_template, request, jsonify, send_from_directory, send_file, session, redirect, url_for
-from multiprocessing.pool import
+from multiprocessing.pool import Pool
from pathlib import Path
from PIL import Image
from datetime import datetime
@@ -28,13 +29,19 @@ app.secret_key = os.environ.get('FLASK_SECRET_KEY', str(uuid.uuid4())) # For se
APP_ROOT = Path(__file__).parent
UPLOAD_FOLDER = APP_ROOT / 'uploads'
RESULTS_FOLDER = APP_ROOT / 'results'
+ANNOT_FOLDER = APP_ROOT / 'annotated'
WEIGHTS_FILE = APP_ROOT / 'weights.pt'
app.config['UPLOAD_FOLDER'] = str(UPLOAD_FOLDER)
app.config['RESULTS_FOLDER'] = str(RESULTS_FOLDER)
+app.config['WEIGHTS_FILE'] = str(WEIGHTS_FILE)
+app.config['ANNOT_FOLDER'] = str(ANNOT_FOLDER)
app.config['ALLOWED_EXTENSIONS'] = {'png', 'jpg', 'jpeg', 'tif', 'tiff'}

UPLOAD_FOLDER.mkdir(parents=True, exist_ok=True)
RESULTS_FOLDER.mkdir(parents=True, exist_ok=True)
+ANNOT_FOLDER.mkdir(parents=True, exist_ok=True)
+
+async_results = {}  # this has to be global I think

@app.errorhandler(Exception)
def handle_exception(e):
@@ -49,7 +56,6 @@ def allowed_file(filename):
def index():
    return render_template('index.html')

-
# Load model once at startup, use CUDA if available
MODEL_DEVICE = 'cuda' if cuda.is_available() else 'cpu'
_model = None
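The `async_results = {}` dict added above has to live at module level because a multiprocessing AsyncResult handle cannot be serialized into Flask's cookie session; the new /process and /progress routes in the next hunk store and poll it by session id. A minimal, self-contained sketch of that pattern (illustrative names only, not code from this commit):

import time
from multiprocessing.pool import Pool

pools = {}          # keep each Pool alive while its jobs are still running
async_results = {}  # session_id -> AsyncResult handle, held in the web process's memory

def work(x, offset):
    # stand-in for process_single_image(img_path, results_dir)
    time.sleep(0.1)
    return x * x + offset

def launch(session_id, items):
    pool = Pool(processes=2)
    pools[session_id] = pool
    # starmap_async returns immediately; the handle is what /progress-style code polls later
    async_results[session_id] = pool.starmap_async(work, [(i, 1) for i in items])
    pool.close()

def poll(session_id):
    handle = async_results.get(session_id)
    if handle is None:
        return "unknown"
    return "completed" if handle.ready() else "processing"

if __name__ == "__main__":
    launch("demo-session", range(5))
    while poll("demo-session") != "completed":
        time.sleep(0.2)
    print(poll("demo-session"))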
@@ -81,238 +87,273 @@ def upload_files():
    upload_dir.mkdir(parents=True, exist_ok=True)
    # generate new unique filenames via uuid, save the mapping dict of old:new to session
    filename_map = {}
+   uuid_map_to_uuid_imgname = {}
    for f in files:
        orig_name = secure_filename(f.filename)
        ext = Path(orig_name).suffix
-
-
+       uuid_base = uuid.uuid4().hex
+       uuid_name = f"{uuid_base}{ext}"
+       file_path = upload_dir / uuid_name
        f.save(str(file_path))
-       filename_map[
+       filename_map[uuid_base] = orig_name
+       uuid_map_to_uuid_imgname[uuid_base] = uuid_name
    session['filename_map'] = filename_map
+   session['uuid_map_to_uuid_imgname'] = uuid_map_to_uuid_imgname
    return jsonify({'filename_map': filename_map, 'status': 'uploaded'})

-#
-@
-def
-
-
+# /preview route for serving original uploaded image
+@app.route('/preview', methods=['POST'])
+def preview_image():
+    try:
+        data = request.get_json()
+        uuid = data.get('uuid')
+        session_id = session['id']
+        uuid_map_to_uuid_imgname = session.get('uuid_map_to_uuid_imgname', {})
+        img_name = uuid_map_to_uuid_imgname.get(uuid)
+        if not img_name:
+            print(f"/preview: No img_name found for uuid {uuid}")
+            return jsonify({'error': 'File not found'}), 404
+        img_path = Path(app.config['UPLOAD_FOLDER']) / session_id / img_name
+        if not img_path.exists():
+            print(f"/preview: File does not exist at {img_path}")
+            return jsonify({'error': 'File not found'}), 404
+        # Determine MIME type
+        ext = img_path.suffix.lower()
+        if ext in ['.jpg', '.jpeg']:
+            mimetype = 'image/jpeg'
+        elif ext in ['.png']:
+            mimetype = 'image/png'
+        elif ext in ['.tif', '.tiff']:
+            mimetype = 'image/tiff'
+        else:
+            mimetype = 'application/octet-stream'
+        return send_file(
+            str(img_path),
+            mimetype=mimetype,
+            as_attachment=False,
+            download_name=img_name
+        )
+    except Exception as e:
+        print(f"Error in /preview: {e}")
+        return jsonify({'error': str(e)}), 500
+
+# initializer for Pool to load model in each process
+# each worker will have its own model instance
+def init_worker(model_path):
+    global model
+    model = YOLO(model_path)
+    if MODEL_DEVICE == 'cuda':
+        model.to('cuda')
+
+# not sure if we need this decorator anymore?
+#@ThreadingLocked()
+def process_single_image(img_path, results_dir):
+    global model
+    uuid_base = img_path.stem
+    pickle_path = results_dir / f"{uuid_base}.pkl"
+    results = detect_in_image(model, str(img_path))
    with open(pickle_path, 'wb') as pf:
-       pickle.dump(
-       return
+        pickle.dump(results, pf)
+    return uuid_base

@app.route('/process', methods=['POST'])
-def
-    model = get_model()
+def start_processing():
    session_id = session['id']
+   job_state = {
+       "status": "starting",
+       "progress": 0,
+       "sessionId": session_id
+   }
+   session['job_state'] = job_state
    filename_map = session.get('filename_map', {})
    upload_dir = Path(app.config['UPLOAD_FOLDER']) / session_id
-   state = {}
-   state['status'] = 'starting'
-   state['progress'] = 0
-   state['filename_map'] = filename_map
-   state['jobId'] = session['id']
-   session['job_state'] = state
-
-   # create a results_dir, clean out old one if needed
    results_dir = Path(app.config['RESULTS_FOLDER']) / session_id
+   # clean out old results if needed
    if results_dir.exists():
        shutil.rmtree(results_dir)
    results_dir.mkdir(parents=True)

-   # set up
-
-
-       upload_dir / filename_map[orig_name],
-       results_dir / f"{Path(orig_name).stem}_results.pkl",
-       model) for orig_name in filename_map.keys()]
+   # set up iterable of uploaded files to process
+   arg_list = [(x,results_dir) for x in list(upload_dir.iterdir())]
+
    try:
-       all_detections = {}
-       state['status'] = 'processing'
-       session['job_state'] = state
        if MODEL_DEVICE == 'cuda':
-
-           for idx, args in enumerate(arg_list):
-               orig_name, img_results = process_single_image(args)
-               all_detections[orig_name] = img_results
-               state['progress'] = int((idx + 1) / n_img * 100)
-               session['job_state'] = state
+           n_proc = 1
        else:
-
-
-
-
-
-
-       # Save all detections to a pickled file
-       detections_path = results_dir / 'all_detections.pkl'
-       with open(detections_path, 'wb') as f:
-           pickle.dump(all_detections, f)
-       state['status'] = 'completed'
-       state['progress'] = 100
-       session['job_state'] = state
-   except Exception as e:
-       print(f"Error in /process: {e}")
-       print(traceback.format_exc())
-       state['status'] = 'error'
-       state['error'] = str(e)
-       state['progress'] = 100
-       session['job_state'] = state
-   resp = {
-       'status': state.get('status', 'unknown'),
-       'progress': state.get('progress', 0),
-       'jobId': state.get('jobId'),
-       'error': state.get('error'),
-   }
-   return jsonify(resp)
-
-# Support /progress/<jobId> for frontend polling
-@app.route('/progress/<jobId>')
-def get_progress_with_id(jobId):
-   try:
-       job_state = session.get('job_state')
-       if not job_state:
-           print(f"/progress/{jobId}: No job_state found in session.")
-           return jsonify({"status": "error", "error": "No job state"}), 404
-       resp = {
-           'status': job_state.get('status', 'unknown'),
-           'progress': job_state.get('progress', 0),
-           'jobId': session.get('id'),
-           'error': job_state.get('error'),
+           n_proc = os.cpu_count()
+       # Initialize job state
+       job_state = {
+           "status": "starting",
+           "progress": 0,
+           "started": True
        }
-
-
-
-
-
-
-
-
-
+       session['job_state'] = job_state
+       pool = Pool(processes=n_proc,
+                   initializer=init_worker,
+                   initargs=(str(WEIGHTS_FILE),))
+       async_results[session_id] = pool.starmap_async(process_single_image, arg_list)
+       pool.close()
+
+       # Update job state after process launch
+       job_state["status"] = "processing"
+       session['job_state'] = job_state
+       return jsonify({'status': 'processing',
+                       'sessionId': session_id
+                       })
    except Exception as e:
-       print(f"Error in /
+       print(f"Error in /process: {e}")
        print(traceback.format_exc())
-       return jsonify({
+       return jsonify({'error': str(e),
+                       'status': 'unknown',
+                       'sessionId': session_id}), 500
+
+@app.route('/progress')
+def get_progress():
+    session_id = session['id']
+    try:
+        job_state = session.get('job_state')
+        if not job_state:
+            print("/progress: No job_state found in session.")
+            return jsonify({"status": "error", "error": "No job state"}), 404
+
+        results_dir = Path(app.config['RESULTS_FOLDER']) / session_id
+        uploads_dir = Path(app.config['UPLOAD_FOLDER']) / session_id
+        n_results = len(list(results_dir.glob('*.pkl')))
+        n_uploads = len(list(uploads_dir.iterdir()))
+
+        # If async_result is ready, verify completion and update job state
+        async_result = async_results.get(session_id)
+        if async_result and async_result.ready():
+            if n_results == n_uploads:
+                job_state['status'] = 'completed'
+                job_state['progress'] = 100
+                session['job_state'] = job_state
+                resp = {
+                    'status': 'completed',
+                    'progress': 100,
+                    'filename_map': session.get('filename_map', {}),
+                    'session_id': job_state.get('sessionId'),
+                    'error': job_state.get('error'),
+                }
+                # Aggregate results into a single response object
+                all_results = {}
+                for pkl_file in results_dir.glob('*.pkl'):
+                    uuid_base = pkl_file.stem
+                    with open(pkl_file, 'rb') as pf:
+                        all_results[uuid_base] = pickle.load(pf)
+                resp['results'] = all_results
+                return jsonify(resp)
+
+        # If still processing, update progress
+        if job_state.get('status') == 'processing':
+            progress = int((n_results / n_uploads) * 100) if n_uploads > 0 else 0
+            job_state['progress'] = progress
+            session['job_state'] = job_state
+            resp = {
+                'status': 'processing',
+                'progress': progress,
+                'sessionId': session_id,
+            }
+            return jsonify(resp)
+        # Default response as a catchall
+        resp = {
+            'status': job_state.get('status', 'unknown'),
+            'progress': job_state.get('progress', 0),
+            'sessionId': job_state.get('session_id'),
+            'error': job_state.get('error'),
+        }
+        return jsonify(resp)
+    except Exception as e:
+        print(f"Error in /progress: {e}")
+        print(traceback.format_exc())
+        return jsonify({"status": "error", "error": str(e)}), 500
+
+def read_img_and_draw(img_path, detections, confidence):
+    img = cv2.imread(str(img_path), cv2.IMREAD_UNCHANGED)
+    filtered = [d for d in detections if d.get('score', 0) >= confidence]
+    for det in filtered:
+        x1, y1, x2, y2 = map(int, det['bbox'])
+        cv2.rectangle(img, (x1, y1), (x2, y2), (0,0,255), 3)
+    return img

# /annotate route for dynamic annotation
@app.route('/annotate', methods=['POST'])
def annotate_image():
    try:
        data = request.get_json()
-
+       uuid = data.get('uuid')
        confidence = float(data.get('confidence', 0.5))
        session_id = session['id']
-
-
-
+       uuid_map_to_uuid_imgname = session.get('uuid_map_to_uuid_imgname', {})
+       img_name = uuid_map_to_uuid_imgname.get(uuid)
+       orig_img_name = session['filename_map'].get(uuid)
+
+       if not img_name:
            return jsonify({'error': 'File not found'}), 404
+
        # Load detections from pickle
-       result_path = Path(app.config['RESULTS_FOLDER']) / session_id / f"{
+       result_path = Path(app.config['RESULTS_FOLDER']) / session_id / f"{uuid}.pkl"
        if not result_path.exists():
            return jsonify({'error': 'Results not found'}), 404
        with open(result_path, 'rb') as pf:
            detections = pickle.load(pf)
-
-       img_path = Path(app.config['UPLOAD_FOLDER']) / session_id /
-       img =
-       #
-
-
-
-
-
-
-
-
-
-
-
-
-
-       return send_file(
-           io.BytesIO(f.read()),
-           mimetype='image/png',
-           as_attachment=False,
-           download_name=out_name
-       )
+
+       img_path = Path(app.config['UPLOAD_FOLDER']) / session_id / img_name
+       img = read_img_and_draw(img_path, detections, confidence)
+       # Save annotated image out
+       annot_dir = Path(app.config['ANNOT_FOLDER']) / session_id
+       annot_dir.mkdir(parents=True, exist_ok=True)
+       annot_imgname = f"{uuid}_annotated.png"
+       annot_imgpath = str(annot_dir / annot_imgname)
+       cv2.imwrite(annot_imgpath, img)
+
+       # Serve image directly from disk
+       return send_file(
+           annot_imgpath,
+           mimetype='image/png',
+           as_attachment=False,
+           download_name=annot_imgname
+       )
    except Exception as e:
        print(f"Error in /annotate: {e}")
        return jsonify({'error': str(e)}), 500

-@app.route('/
-def download_file(filename):
-    try:
-        session_id = session['id']
-        if '..' in filename or filename.startswith('/'):
-            return jsonify({"error": "Invalid filename"}), 400
-        safe_filename = secure_filename(filename)
-        file_dir = Path(app.config['RESULTS_FOLDER']) / session_id
-        file_path = (file_dir / safe_filename).resolve()
-        if not str(file_path).startswith(str(file_dir.resolve())):
-            print(f"Attempted path traversal: {session_id}/{filename}")
-            return jsonify({"error": "Invalid file path"}), 400
-        if not file_path.is_file():
-            if not file_dir.exists():
-                return jsonify({"error": f"Session directory {session_id} not found"}), 404
-            files_in_dir = list(file_dir.iterdir())
-            return jsonify({"error": f"File '{filename}' not found in session '{session_id}'. Available: {[f.name for f in files_in_dir]}"}), 404
-
-        if filename.lower().endswith(('.tif', '.tiff')):
-            try:
-                with Image.open(file_path) as img:
-                    img = img.convert('RGBA') if img.mode in ('RGBA', 'LA') or (img.mode == 'P' and 'transparency' in img.info) else img.convert('RGB')
-                    img_byte_arr = io.BytesIO()
-                    img.save(img_byte_arr, format='PNG')
-                    img_byte_arr.seek(0)
-                    return send_file(
-                        img_byte_arr,
-                        mimetype='image/png',
-                        as_attachment=False,
-                        download_name=f"{Path(filename).stem}.png"
-                    )
-            except Exception as e:
-                print(f"Error converting TIF to PNG: {e}")
-                return jsonify({"error": "Could not convert TIF image"}), 500
-
-        mime_type = None
-        if safe_filename.lower().endswith(('.png', '.jpg', '.jpeg')):
-            try:
-                with Image.open(file_path) as img:
-                    mime_type = 'image/jpeg' if img.format == 'JPEG' else 'image/png'
-            except Exception as img_err:
-                print(f"Could not determine MIME type for {safe_filename}: {img_err}")
-
-        if safe_filename.lower() == "results.csv":
-            mime_type = 'text/csv'
-            return send_file(
-                str(file_path),
-                mimetype=mime_type,
-                as_attachment=True,
-                download_name=safe_filename
-            )
-
-        return send_file(str(file_path), mimetype=mime_type)
-    except Exception as e:
-        error_message = f"File serving error: {str(e)}"
-        print(error_message)
-        return jsonify({"error": "Server error", "log": error_message}), 500
-
-@app.route('/export_images')
+@app.route('/export_images', methods=['POST'])
def export_images():
    try:
+       data = request.get_json()
+       confidence = float(data.get('confidence', 0.5))
        session_id = session['id']
-
-
-
-
-
-       if not annotated_files:
-           return jsonify({"error": "No annotated images found"}), 404
+       filename_map = session.get('filename_map', {})
+       uuid_map_to_uuid_imgname = session.get('uuid_map_to_uuid_imgname', {})
+       # ensure there's a landing spot
+       annot_dir = Path(app.config['ANNOT_FOLDER']) / session_id
+       annot_dir.mkdir(parents=True, exist_ok=True)

+       # add all annotated files to zip
        memory_file = io.BytesIO()
        with zipfile.ZipFile(memory_file, 'w', zipfile.ZIP_DEFLATED) as zf:
-
-
-
+           # iterate through all uuids
+           for uuid in filename_map.keys():
+               img_name = uuid_map_to_uuid_imgname.get(uuid)
+               if not img_name:
+                   continue
+               img_path = Path(app.config['UPLOAD_FOLDER']) / session_id / img_name
+               result_path = Path(app.config['RESULTS_FOLDER']) / session_id / f"{uuid}.pkl"
+               if not result_path.exists():
+                   return jsonify({'error': 'Results not found'}), 404
+               if not img_path.exists():
+                   return jsonify({'error': 'Image not found'}), 404
+               with open(result_path, 'rb') as pf:
+                   detections = pickle.load(pf)
+               img = read_img_and_draw(img_path, detections, confidence)
+               # clean the name
+               orig_name = filename_map.get(uuid)
+               annot_imgname = f"{str(Path(orig_name).stem)}_annotated.png"
+               annot_imgpath = str(annot_dir / annot_imgname)
+               cv2.imwrite(annot_imgpath, img)
+               zf.write(annot_imgpath, annot_imgname)
+       # timestamp for filename
        memory_file.seek(0)
        timestamp = datetime.now().strftime('%Y%m%d-%H%M%S')

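The hunk above swaps the old in-request loop for a multiprocessing Pool whose initializer loads the YOLO model once per worker, so each worker process reuses its own model across images (detect_in_image and the weights file come from the app itself and are not shown here). A minimal sketch of the initializer/initargs pattern with a stand-in for the model, assuming nothing beyond the standard library:

import os
from multiprocessing.pool import Pool

model = None  # populated per worker by the initializer

def init_worker(model_path):
    global model
    # stand-in for model = YOLO(model_path); pretend loading is expensive
    model = f"model-loaded-from-{model_path}-in-pid-{os.getpid()}"

def process_single_image(img_path, results_dir):
    # the worker-global 'model' is already loaded; no per-task load cost
    return f"{img_path} scored by {model} -> {results_dir}"

if __name__ == "__main__":
    arg_list = [(f"img_{i}.png", "results/") for i in range(4)]
    with Pool(processes=2, initializer=init_worker, initargs=("weights.pt",)) as pool:
        for line in pool.starmap(process_single_image, arg_list):
            print(line)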
@@ -359,42 +400,6 @@ def export_csv():
        print(error_message)
        return jsonify({"error": "Server error", "log": error_message}), 500

-@app.route('/export_images', methods=['POST'])
-def export_images_post():
-    try:
-        data = request.json
-        session_id = session['id']
-        threshold = float(data.get('confidence', 0.5))
-        job_state = session.get('job_state')
-        if not job_state:
-            return jsonify({'error': 'Job not found'}), 404
-        memory_file = io.BytesIO()
-        with zipfile.ZipFile(memory_file, 'w', zipfile.ZIP_DEFLATED) as zf:
-            for orig_name, detections in job_state['detections'].items():
-                unique_name = job_state['filename_map'][orig_name]
-                img_path = Path(app.config['UPLOAD_FOLDER']) / session_id / unique_name
-                img = cv2.imread(str(img_path), cv2.IMREAD_UNCHANGED)
-                filtered = [d for d in detections if d['score'] >= threshold]
-                for det in filtered:
-                    x1, y1, x2, y2 = map(int, det['bbox'])
-                    cv2.rectangle(img, (x1, y1), (x2, y2), (0,0,255), 3)
-                out_name = f"{Path(orig_name).stem}.png"
-                _, img_bytes = cv2.imencode('.png', img)
-                zf.writestr(out_name, img_bytes.tobytes())
-        memory_file.seek(0)
-        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
-        return send_file(
-            memory_file,
-            mimetype='application/zip',
-            as_attachment=True,
-            download_name=f'nemaquant_annotated_{timestamp}.zip'
-        )
-    except Exception as e:
-        error_message = f"Error exporting images: {str(e)}"
-        print(error_message)
-        return jsonify({"error": "Server error", "log": error_message}), 500
-
-

def print_startup_info():
    print("----- NemaQuant Flask App Starting -----")

static/script.js
CHANGED

@@ -32,6 +32,7 @@ document.addEventListener('DOMContentLoaded', () => {
    let currentImageIndex = -1;
    let currentJobId = null;
    let currentZoomLevel = 1;
+   let filenameMap = {};
    const MAX_ZOOM = 3;
    const MIN_ZOOM = 0.5;
    let progressInterval = null; // Interval timer for polling
@@ -271,12 +272,25 @@ document.addEventListener('DOMContentLoaded', () => {
        if (response.ok) {
            const data = await response.json();
            logStatus('Files uploaded successfully.');
-
+           filenameMap = data.filename_map || {};
+
+           // Update results table with filenames and View buttons
            resultsTableBody.innerHTML = '';
-           Object.
+           Object.entries(filenameMap).forEach(([uuid, originalFilename], idx) => {
                const row = resultsTableBody.insertRow();
                row.dataset.originalIndex = idx;
-               row.innerHTML =
+               row.innerHTML = `
+                   <td>${originalFilename}</td>
+                   <td style="color:#bbb;">NA</td>
+                   <td><button class="view-button" data-index="${idx}">View</button></td>
+               `;
+           });
+           // Add click event for View buttons
+           resultsTableBody.querySelectorAll('.view-button').forEach(btn => {
+               btn.addEventListener('click', (e) => {
+                   const idx = parseInt(btn.dataset.index, 10);
+                   displayImage(idx);
+               });
            });
        } else {
            logStatus('File upload failed.');
@@ -306,22 +320,22 @@ document.addEventListener('DOMContentLoaded', () => {
        } else {
            uploadText.textContent = `${validFileCount} image${validFileCount === 1 ? '' : 's'} selected`;
            startProcessingBtn.disabled = validFileCount === 0;
-           // Populate results table with
+           // Populate results table with uuid/filename pairs from filenameMap after upload
            resultsTableBody.innerHTML = '';
-
+           Object.entries(filenameMap).forEach(([uuid, originalFilename], idx) => {
                const row = resultsTableBody.insertRow();
                row.dataset.originalIndex = idx;
                row.innerHTML = `
-                   <td>${
-                   <td>
-                   <td><button class="view-button" data-index="${idx}">View</button></td>
+                   <td>${originalFilename}</td>
+                   <td style="color:#bbb;">NA</td>
+                   <td><button class="view-button" data-uuid="${uuid}" data-index="${idx}">View</button></td>
                `;
            });
            // Add click event for View buttons
            resultsTableBody.querySelectorAll('.view-button').forEach(btn => {
                btn.addEventListener('click', (e) => {
-                   const
-                   displayImage(
+                   const uuid = btn.getAttribute('data-uuid');
+                   displayImage(uuid);
                });
            });
        }
@@ -351,8 +365,7 @@ document.addEventListener('DOMContentLoaded', () => {
        setLoading(true);
        logStatus('Starting upload and processing...');
        updateProgress(0, 'Uploading files...');
-       resultsTableBody
-       clearPreview();
+       // Do not clear resultsTableBody or preview image so users can browse existing results during processing
        currentResults = [];
        if (progressInterval) {
            clearInterval(progressInterval);
@@ -389,12 +402,12 @@ document.addEventListener('DOMContentLoaded', () => {
                throw new Error(data.error);
            }
            // --- ASYNC JOB: Start polling for progress ---
-           if (data.
-               logStatus(`Processing started. Job ID: ${data.
-               currentJobId = data.
+           if (data.sessionId) {
+               // logStatus(`Processing started. Job ID: ${data.sessionId}`);
+               currentJobId = data.sessionId;
                pollProgress(currentJobId);
            } else {
-               logStatus('Error: No
+               logStatus('Error: No sessionId returned from server.');
                setLoading(false);
            }
        } catch (error) {
@@ -411,12 +424,15 @@ document.addEventListener('DOMContentLoaded', () => {
    // --- Filtering and Table Update ---
    function updateResultsTable() {
        const threshold = parseFloat(confidenceSlider.value);
-       // Use allDetections (array of {
-       const
+       // Use allDetections (array of {uuid, detections}) for filtering
+       const prevUuid = (currentImageIndex >= 0 && currentResults[currentImageIndex]) ? currentResults[currentImageIndex].uuid : null;
        currentResults = allDetections.map(imgResult => {
            const filtered = imgResult.detections.filter(det => det.score >= threshold);
+           // Use filenameMap to convert uuid to original filename for display
+           const originalFilename = filenameMap[imgResult.uuid] || imgResult.uuid;
            return {
-
+               uuid: imgResult.uuid,
+               filename: originalFilename,
                num_eggs: filtered.length,
                detections: filtered
            };
@@ -429,8 +445,8 @@ document.addEventListener('DOMContentLoaded', () => {
        displayResultsPage(currentPage);
        // Try to restore previous image if it still exists
        let newIndex = 0;
-       if (
-           newIndex = currentResults.findIndex(r => r.
+       if (prevUuid) {
+           newIndex = currentResults.findIndex(r => r.uuid === prevUuid);
            if (newIndex === -1) newIndex = 0;
        }
        currentImageIndex = newIndex;
@@ -470,30 +486,44 @@ document.addEventListener('DOMContentLoaded', () => {

    // --- Replace displayImage to use backend-annotated PNG ---
    async function displayImage(index) {
-       //
-       let
-
-
-
-
+       // Always use uuid, not filename, for backend requests
+       let uuid = index;
+       // If index is a number, get uuid from filenameMap
+       if (typeof index === 'number') {
+           uuid = Object.keys(filenameMap)[index];
+           currentImageIndex = index;
        } else {
-
+           // If index is a uuid string, find its index for navigation
+           currentImageIndex = Object.keys(filenameMap).indexOf(index);
        }
-
+       let isCompleted = allDetections && allDetections.length > 0;
        const confidence = parseFloat(confidenceSlider.value);
        try {
-
-
-
-
-
+           let response;
+           if (isCompleted) {
+               response = await fetch('/annotate', {
+                   method: 'POST',
+                   headers: { 'Content-Type': 'application/json' },
+                   body: JSON.stringify({ uuid: uuid, confidence })
+               });
+           } else {
+               response = await fetch('/preview', {
+                   method: 'POST',
+                   headers: { 'Content-Type': 'application/json' },
+                   body: JSON.stringify({ uuid: uuid })
+               });
+           }
            if (response.ok) {
                const blob = await response.blob();
+               console.log((isCompleted ? 'Annotate' : 'Preview') + ' image blob type:', blob.type, 'uuid:', uuid);
+               if (previewImage.src && previewImage.src.startsWith('blob:')) {
+                   URL.revokeObjectURL(previewImage.src);
+               }
+               const objectUrl = URL.createObjectURL(blob);
                previewImage.onload = function() {
                    updateImageInfo();
                    zoomInBtn.disabled = false;
                    zoomOutBtn.disabled = false;
-                   // Calculate which page this image should be on
                    const targetPage = Math.floor(index / RESULTS_PER_PAGE) + 1;
                    if (currentPage !== targetPage) {
                        currentPage = targetPage;
@@ -507,9 +537,10 @@ document.addEventListener('DOMContentLoaded', () => {
                    }
                    resetPanZoom();
                };
-               previewImage.src =
-               previewImage.alt =
+               previewImage.src = objectUrl;
+               previewImage.alt = uuid;
            } else {
+               console.error((isCompleted ? 'Annotate' : 'Preview') + ' image fetch failed:', response.status);
                clearPreview();
            }
        } catch (e) {
@@ -520,14 +551,14 @@ document.addEventListener('DOMContentLoaded', () => {
    }

    // --- New Polling Function ---
-   function pollProgress(
+   function pollProgress(sessionId) {
        if (progressInterval) {
            clearInterval(progressInterval); // Clear any existing timer
        }

        progressInterval = setInterval(async () => {
            try {
-               const response = await fetch(`/progress
+               const response = await fetch(`/progress`);
                if (!response.ok) {
                    let errorText = `Progress check failed: ${response.status}`;
                    try {
@@ -547,10 +578,9 @@ document.addEventListener('DOMContentLoaded', () => {
                        break;
                    case 'processing':
                        updateProgress(data.progress || 0, `Processing (${data.progress || 0}%)`);
-                       logStatus(`Processing images... (${data.progress || 0}%)`);
                        // If results are present, update detections and table
                        if (data.results) {
-                           allDetections = Object.entries(data.results).map(([
+                           allDetections = Object.entries(data.results).map(([uuid, detections]) => ({ uuid, detections }));
                            updateResultsTable();
                        }
                        break;
@@ -560,7 +590,7 @@ document.addEventListener('DOMContentLoaded', () => {
                        updateProgress(100, 'Processing complete');
                        logStatus('Processing finished successfully.');
                        if (data.results) {
-                           allDetections = Object.entries(data.results).map(([
+                           allDetections = Object.entries(data.results).map(([uuid, detections]) => ({ uuid, detections }));
                            updateResultsTable();
                        }
                        renderConfidencePlot();
@@ -684,9 +714,9 @@ document.addEventListener('DOMContentLoaded', () => {

        // If job is not completed, show filenames only
        if (jobStatus !== 'completed') {
-           Object.
+           Object.entries(filenameMap).forEach(([uuid, originalFilename], idx) => {
                const row = resultsTableBody.insertRow();
-               row.innerHTML = `<td>${
+               row.innerHTML = `<td>${originalFilename}</td><td style="color:#bbb;">NA</td>`;
            });
            exportCsvBtn.disabled = true;
            exportImagesBtn.disabled = true;
@@ -696,13 +726,13 @@ document.addEventListener('DOMContentLoaded', () => {

        // If job is completed, show filtered detection counts
        if (resultsObj) {
-           Object.
-               const detections = resultsObj[filename] || [];
+           Object.entries(resultsObj).forEach(([uuid, detections], idx) => {
                // Filter by confidence threshold
                const threshold = parseFloat(confidenceSlider.value);
                const filtered = detections.filter(d => d.score >= threshold);
+               const originalFilename = filenameMap[uuid] || uuid;
                const row = resultsTableBody.insertRow();
-               row.innerHTML = `<td>${
+               row.innerHTML = `<td>${originalFilename}</td><td>${filtered.length}</td>`;
            });
            exportCsvBtn.disabled = false;
            exportImagesBtn.disabled = false;
@@ -723,23 +753,24 @@ document.addEventListener('DOMContentLoaded', () => {
        // Display results for current page
        for (let i = startIndex; i < endIndex; i++) {
            const result = currentResults[i];
+           const originalFilename = filenameMap[result.uuid] || result.uuid;
            const row = resultsTableBody.insertRow();
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+           row.innerHTML = `
+               <td>
+                   <i class="ri-image-line"></i>
+                   ${originalFilename}
+               </td>
+               <td>${result.num_eggs}</td>
+               <td class="text-right">
+                   <button class="view-button" data-index="${i}" title="Click to view image">
+                       <i class="ri-eye-line"></i>
+                       View
+                   </button>
+               </td>
+           `;
+           // Store the original index to maintain image preview relationship
+           row.dataset.originalIndex = i;
+       }
        // Wire up View buttons after rows are created
        resultsTableBody.querySelectorAll('.view-button').forEach(btn => {
            btn.addEventListener('click', (e) => {
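The reworked pollProgress() above keys off only a few fields of the /progress JSON: 'status', 'progress', and, once completed, 'results' keyed by uuid. A hedged sketch of checking that contract with Flask's test client; it assumes `from app import app` is safe to do in a test process (importing app.py creates the uploads/results/annotated folders but does not load the model):

from pathlib import Path

from app import app, UPLOAD_FOLDER, RESULTS_FOLDER

def test_progress_without_job_state():
    with app.test_client() as client:
        # pre-seed the cookie session the way the upload/process flow would
        with client.session_transaction() as sess:
            sess["id"] = "test-session"
        resp = client.get("/progress")
        assert resp.status_code == 404
        assert resp.get_json()["status"] == "error"

def test_progress_reports_status_and_progress():
    # pre-create the per-session folders that get_progress() counts files in
    (Path(UPLOAD_FOLDER) / "test-session").mkdir(parents=True, exist_ok=True)
    (Path(RESULTS_FOLDER) / "test-session").mkdir(parents=True, exist_ok=True)
    with app.test_client() as client:
        with client.session_transaction() as sess:
            sess["id"] = "test-session"
            sess["job_state"] = {"status": "starting", "progress": 0, "sessionId": "test-session"}
        body = client.get("/progress").get_json()
        # the keys the frontend switch statement relies on
        assert "status" in body and "progress" in body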