Adding result shower

2025-12-10 16:55:28 +02:00
parent 5370d31dce
commit 833b222fad
7 changed files with 672 additions and 71 deletions

@@ -42,6 +42,7 @@ class InferenceEngine:
         relative_path: str,
         conf: float = 0.25,
         save_to_db: bool = True,
+        repository_root: Optional[str] = None,
     ) -> Dict:
         """
         Detect objects in a single image.
@@ -51,11 +52,17 @@ class InferenceEngine:
             relative_path: Relative path from repository root
             conf: Confidence threshold
             save_to_db: Whether to save results to database
+            repository_root: Base directory used to compute relative_path (if known)

         Returns:
             Dictionary with detection results
         """
         try:
+            # Normalize storage path (fall back to absolute path when repo root is unknown)
+            stored_relative_path = relative_path
+            if not repository_root:
+                stored_relative_path = str(Path(image_path).resolve())
+
             # Get image dimensions
             img = Image.open(image_path)
             width, height = img.size
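
Read on its own, the fallback above means that when no repository_root is supplied, the stored path silently becomes the absolute path of the image. A minimal sketch of that rule as a standalone helper (resolve_storage_path and the example paths are hypothetical, not part of the codebase):

from pathlib import Path
from typing import Optional


def resolve_storage_path(image_path: str, relative_path: str,
                         repository_root: Optional[str] = None) -> str:
    """Mirror of the fallback in detect_single: keep the relative path only
    when a repository root is known, otherwise store the absolute path."""
    if repository_root:
        return relative_path
    return str(Path(image_path).resolve())


# Hypothetical usage:
# resolve_storage_path("/data/imgs/cat.jpg", "imgs/cat.jpg", "/data")  -> "imgs/cat.jpg"
# resolve_storage_path("/data/imgs/cat.jpg", "imgs/cat.jpg")           -> "/data/imgs/cat.jpg"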
@@ -66,34 +73,58 @@ class InferenceEngine:
             # Add/get image in database
             image_id = self.db_manager.get_or_create_image(
-                relative_path=relative_path,
+                relative_path=stored_relative_path,
                 filename=Path(image_path).name,
                 width=width,
                 height=height,
             )

-            # Save detections to database
-            if save_to_db and detections:
-                detection_records = []
-                for det in detections:
-                    # Use normalized bbox from detection
-                    bbox_normalized = det[
-                        "bbox_normalized"
-                    ]  # [x_min, y_min, x_max, y_max]
-
-                    record = {
-                        "image_id": image_id,
-                        "model_id": self.model_id,
-                        "class_name": det["class_name"],
-                        "bbox": tuple(bbox_normalized),
-                        "confidence": det["confidence"],
-                        "segmentation_mask": det.get("segmentation_mask"),
-                        "metadata": {"class_id": det["class_id"]},
-                    }
-                    detection_records.append(record)
-
-                self.db_manager.add_detections_batch(detection_records)
-                logger.info(f"Saved {len(detection_records)} detections to database")
+            inserted_count = 0
+            deleted_count = 0
+
+            # Save detections to database, replacing any previous results for this image/model
+            if save_to_db:
+                deleted_count = self.db_manager.delete_detections_for_image(
+                    image_id, self.model_id
+                )
+
+                if detections:
+                    detection_records = []
+                    for det in detections:
+                        # Use normalized bbox from detection
+                        bbox_normalized = det[
+                            "bbox_normalized"
+                        ]  # [x_min, y_min, x_max, y_max]
+
+                        metadata = {
+                            "class_id": det["class_id"],
+                            "source_path": str(Path(image_path).resolve()),
+                        }
+                        if repository_root:
+                            metadata["repository_root"] = str(
+                                Path(repository_root).resolve()
+                            )
+
+                        record = {
+                            "image_id": image_id,
+                            "model_id": self.model_id,
+                            "class_name": det["class_name"],
+                            "bbox": tuple(bbox_normalized),
+                            "confidence": det["confidence"],
+                            "segmentation_mask": det.get("segmentation_mask"),
+                            "metadata": metadata,
+                        }
+                        detection_records.append(record)
+
+                    inserted_count = self.db_manager.add_detections_batch(
+                        detection_records
+                    )
+                    logger.info(
+                        f"Saved {inserted_count} detections to database (replaced {deleted_count})"
+                    )
+                else:
+                    logger.info(
+                        f"Detection run removed {deleted_count} stale entries but produced no new detections"
+                    )

             return {
                 "success": True,
@@ -142,7 +173,12 @@ class InferenceEngine:
             rel_path = get_relative_path(image_path, repository_root)

             # Perform detection
-            result = self.detect_single(image_path, rel_path, conf)
+            result = self.detect_single(
+                image_path,
+                rel_path,
+                conf=conf,
+                repository_root=repository_root,
+            )
             results.append(result)

             # Update progress
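
A caller-side sketch of the new keyword, under assumed constructor arguments (only detect_single, conf, and repository_root come from this diff):

from inference_engine import InferenceEngine  # module name is an assumption

engine = InferenceEngine(model_path="weights/best.pt", db_path="detections.db")  # assumed ctor

# With a repository root: the relative path is stored and the root is recorded in metadata.
result = engine.detect_single(
    "/data/repo/images/frame_0001.jpg",
    "images/frame_0001.jpg",
    conf=0.25,
    repository_root="/data/repo",
)

# Without one: the engine falls back to storing the absolute image path.
result = engine.detect_single("/tmp/frame_0001.jpg", "frame_0001.jpg", conf=0.25)

# Re-running either call replaces the previous detections for that image/model
# pair instead of appending duplicates.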