@@ -469,7 +469,7 @@ def _runModelInference(self, images_tensor: torch.Tensor) -> Dict[str, Any]:
469469
470470 return predictions
471471
472- def _baseline_m_from_camera_locations (self , camera_locations , camera_ids = None ) -> float :
472+ def _baseline_metric_from_camera_locations (self , camera_locations , camera_ids = None ) -> float :
473473 """
474474 Robustly compute a metric baseline (meters) from camera_locations.
475475
@@ -588,16 +588,16 @@ def _processOutputs(self, predictions: Dict[str, Any], original_sizes: List[tupl
588588 camera_to_world_list .append (c2w )
589589
590590 # --- SCALE FIX: compute metric baseline from provided camera_locations ---
591- baseline_m = self ._baseline_m_from_camera_locations (camera_locations , camera_ids = camera_ids )
591+ baseline_metric = self ._baseline_metric_from_camera_locations (camera_locations , camera_ids = camera_ids )
592592
593- if baseline_m <= 0 :
593+ if baseline_metric <= 0 :
594594 log .warning ("VGGT: camera_locations missing/invalid; skipping metric scaling (scale will be arbitrary)." )
595595
596- if baseline_m > 0 and len (camera_to_world_list ) >= 2 :
596+ if baseline_metric > 0 and len (camera_to_world_list ) >= 2 :
597597 b_units = self ._baseline_units (camera_to_world_list [0 ], camera_to_world_list [1 ])
598598 if b_units > 1e-6 :
599- s = baseline_m / b_units
600- log .info (f"Scaling VGGT outputs by s={ s :.6f} (baseline { baseline_m :.6f} m / { b_units :.6f} units)" )
599+ s = baseline_metric / b_units
600+ log .info (f"Scaling VGGT outputs by s={ s :.6f} (baseline { baseline_metric :.6f} m / { b_units :.6f} units)" )
601601
602602 # scale camera translations
603603 for k in range (len (camera_to_world_list )):
0 commit comments