Adding debug messages
@@ -192,7 +192,9 @@ class YOLOWrapper:
         prepared_source, cleanup_path = self._prepare_source(source)

         try:
-            logger.info(f"Running inference on {source}")
+            logger.info(
+                f"Running inference on {source} -> prepared_source {prepared_source}"
+            )
             results = self.model.predict(
                 source=prepared_source,
                 conf=conf,

@@ -106,6 +106,7 @@ def apply_ultralytics_16bit_tiff_patches(*, force: bool = False) -> None:
     def preprocess_batch_16bit(self, batch: dict) -> dict:  # type: ignore[override]
         # Start from upstream behavior to keep device placement + multiscale identical,
         # but replace the 255 division with dtype-aware scaling.
+        logger.info(f"Preprocessing batch with monkey-patched preprocess_batch")
         for k, v in batch.items():
             if isinstance(v, torch.Tensor):
                 batch[k] = v.to(self.device, non_blocking=self.device.type == "cuda")
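
The comment in this hunk refers to "dtype-aware scaling" replacing upstream's hard-coded division by 255 (upstream Ultralytics normalizes with batch["img"].float() / 255, which assumes 8-bit input). Below is a rough sketch of what such scaling could look like; the helper name and its exact placement are assumptions for illustration, not code from this commit:

import torch

def scale_to_unit_range(img: torch.Tensor) -> torch.Tensor:
    # Hypothetical helper, not the repository's actual implementation:
    # derive the divisor from the integer dtype's full range instead of
    # hard-coding 255, so uint8 input still divides by 255 while 16-bit
    # TIFF data divides by its own dtype maximum.
    if img.is_floating_point():
        return img  # already scaled; pass through unchanged
    max_val = float(torch.iinfo(img.dtype).max)
    return img.float() / max_val

In the patched preprocess_batch_16bit above, a step like this would presumably stand in for the fixed /255 normalization once tensors have been moved to the target device.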