Adding debug messages
@@ -192,7 +192,9 @@ class YOLOWrapper:
         prepared_source, cleanup_path = self._prepare_source(source)

         try:
-            logger.info(f"Running inference on {source}")
+            logger.info(
+                f"Running inference on {source} -> prepared_source {prepared_source}"
+            )
             results = self.model.predict(
                 source=prepared_source,
                 conf=conf,
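For context, the reworked log line records both the caller-supplied source and the path returned by _prepare_source, which matters when the input is rewritten (for example, converted to a temporary file) before being handed to model.predict. Below is a minimal sketch of how such a wrapper method could be laid out; everything beyond the lines shown in the hunk above (the predict signature, the _prepare_source stub, the finally cleanup) is an assumption, not the repository's actual code.

import logging
import os

from ultralytics import YOLO

logger = logging.getLogger(__name__)


class YOLOWrapper:
    def __init__(self, weights: str = "yolov8n.pt") -> None:
        self.model = YOLO(weights)

    def _prepare_source(self, source: str) -> tuple[str, str | None]:
        # Hypothetical stub: convert unsupported inputs (e.g. 16-bit TIFFs)
        # and return (path_to_use, temp_path_to_delete_or_None).
        return source, None

    def predict(self, source: str, conf: float = 0.25):
        prepared_source, cleanup_path = self._prepare_source(source)
        try:
            logger.info(
                f"Running inference on {source} -> prepared_source {prepared_source}"
            )
            return self.model.predict(source=prepared_source, conf=conf)
        finally:
            # Remove any temporary file created by _prepare_source.
            if cleanup_path:
                os.remove(cleanup_path)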
@@ -106,6 +106,7 @@ def apply_ultralytics_16bit_tiff_patches(*, force: bool = False) -> None:
     def preprocess_batch_16bit(self, batch: dict) -> dict:  # type: ignore[override]
         # Start from upstream behavior to keep device placement + multiscale identical,
         # but replace the 255 division with dtype-aware scaling.
+        logger.info(f"Preprocessing batch with monkey-patched preprocess_batch")
         for k, v in batch.items():
             if isinstance(v, torch.Tensor):
                 batch[k] = v.to(self.device, non_blocking=self.device.type == "cuda")
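The comments above state the intent of the patched preprocess_batch: keep upstream device placement and multiscale behavior, but normalize by the image dtype's maximum value rather than a hard-coded 255, so 16-bit TIFF batches also land in [0, 1]. Below is a minimal sketch of that dtype-aware scaling step, assuming integer image tensors (uint8, or uint16 on a PyTorch version that supports it); the full patched body is not part of this diff.

import torch


def scale_images_by_dtype(img: torch.Tensor) -> torch.Tensor:
    # Dtype-aware replacement for upstream's hard-coded "img.float() / 255":
    # derive the divisor from the integer dtype's maximum value (255 for
    # uint8, 65535 for uint16) so 16-bit data also ends up in [0, 1].
    if not torch.is_floating_point(img):
        divisor = float(torch.iinfo(img.dtype).max)
    else:
        divisor = 1.0  # already floating point; assume it was scaled earlier
    return img.float() / divisor

In the patched method, a step like this would apply to batch["img"] after the device-placement loop shown in the hunk above.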