From 458ffbb9419882c1d36ab4797c06e8cceabb778b Mon Sep 17 00:00:00 2001
From: tako <0214.taiki@gmail.com>
Date: Sun, 24 May 2020 07:35:03 +0900
Subject: [PATCH] fix bug

I got the following error when I ran this program on a MacBook Pro:

    RuntimeError: Attempting to deserialize object on a CUDA device but
    torch.cuda.is_available() is False. If you are running on a CPU-only
    machine, please use torch.load with map_location=torch.device('cpu')
    to map your storages to the CPU.

This commit fixes the error by passing map_location to torch.load, so the
checkpoint is remapped to the current device on CPU-only machines.
---
 demo/inference.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/demo/inference.py b/demo/inference.py
index efff86a7..4ff05bab 100644
--- a/demo/inference.py
+++ b/demo/inference.py
@@ -223,7 +223,7 @@ def main():
 
     if cfg.TEST.MODEL_FILE:
         print('=> loading model from {}'.format(cfg.TEST.MODEL_FILE))
-        pose_model.load_state_dict(torch.load(cfg.TEST.MODEL_FILE), strict=False)
+        pose_model.load_state_dict(torch.load(cfg.TEST.MODEL_FILE, map_location=CTX), strict=False)
     else:
         print('expected model defined in config at TEST.MODEL_FILE')
 
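
Note: for context, a minimal, self-contained sketch of the device-aware
loading pattern this patch relies on. It assumes CTX is defined in
demo/inference.py as a torch.device chosen from CUDA availability; the
load_checkpoint helper and its argument names below are illustrative, not
part of the patch:

    import torch

    # Resolve the target device once: CUDA when available, else CPU.
    CTX = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')

    def load_checkpoint(model, checkpoint_path):
        # map_location remaps storages that were saved on a CUDA device
        # onto CTX, avoiding the RuntimeError on CPU-only machines.
        state_dict = torch.load(checkpoint_path, map_location=CTX)
        model.load_state_dict(state_dict, strict=False)
        return model.to(CTX)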