Skip to content

Commit 52b9685

Browse files
authored
Handle move errors (#4317)
* Handle move errors
* Make it a warning instead of a fatal error
1 parent d123713 commit 52b9685

File tree

1 file changed

+11
-5
lines changed

1 file changed

+11
-5
lines changed

Diff for: backend/model_server/main.py

+11-5
Original file line numberDiff line numberDiff line change
@@ -65,11 +65,17 @@ async def lifespan(app: FastAPI) -> AsyncGenerator:
6565

6666
app.state.gpu_type = gpu_type
6767

68-
if TEMP_HF_CACHE_PATH.is_dir():
69-
logger.notice("Moving contents of temp_huggingface to huggingface cache.")
70-
_move_files_recursively(TEMP_HF_CACHE_PATH, HF_CACHE_PATH)
71-
shutil.rmtree(TEMP_HF_CACHE_PATH, ignore_errors=True)
72-
logger.notice("Moved contents of temp_huggingface to huggingface cache.")
68+
try:
69+
if TEMP_HF_CACHE_PATH.is_dir():
70+
logger.notice("Moving contents of temp_huggingface to huggingface cache.")
71+
_move_files_recursively(TEMP_HF_CACHE_PATH, HF_CACHE_PATH)
72+
shutil.rmtree(TEMP_HF_CACHE_PATH, ignore_errors=True)
73+
logger.notice("Moved contents of temp_huggingface to huggingface cache.")
74+
except Exception as e:
75+
logger.warning(
76+
f"Error moving contents of temp_huggingface to huggingface cache: {e}. "
77+
"This is not a critical error and the model server will continue to run."
78+
)
7379

7480
torch.set_num_threads(max(MIN_THREADS_ML_MODELS, torch.get_num_threads()))
7581
logger.notice(f"Torch Threads: {torch.get_num_threads()}")

0 commit comments

Comments
 (0)