Cache HuggingFace Model #6

# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
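
# Manually triggered workflow that downloads a HuggingFace model into the
# shared HF_HOME cache on the self-hosted runner, so other CI jobs can load
# the model from the local cache instead of the Hub.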
name: "Cache HuggingFace Model"
on:
workflow_dispatch:
inputs:
hf-model-id:
type: string
description: HuggingFace repo ID of the model to cache
required: true
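
# The download job runs on the self-hosted runner with HF_HUB_OFFLINE disabled
# so the Hub can be reached, and HF_HOME pointed at the shared test-data cache.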
jobs:
  download:
    name: Download Model
    runs-on: self-hosted-nemo
    environment: nemo-ci
    env:
      HF_HUB_OFFLINE: 0
      HF_HOME: /mnt/datadrive/TestData/HF_HOME
    steps:
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.12"
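      # torch and transformers are needed to instantiate the model in the final
      # step; huggingface_hub[cli] supplies the `hf` command used to log in.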
      - name: Install dependencies
        run: |
          pip install torch transformers "huggingface_hub[cli]"
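      # Authenticate with the Hub token so that gated or private repositories
      # can be downloaded as well.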
      - name: Log in to HuggingFace
        env:
          HF_ACCESS_TOKEN: ${{ secrets.HF_ACCESS_TOKEN }}
        run: |
          hf auth login --token "${HF_ACCESS_TOKEN}"
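      # `shell: python` runs the block below as an inline Python script;
      # AutoModel.from_pretrained downloads the weights into the HF_HOME cache.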
      - name: Download model
        env:
          MODEL_ID: ${{ github.event.inputs.hf-model-id }}
        shell: python
        run: |
          import os
          from transformers import AutoModel
          _ = AutoModel.from_pretrained(os.getenv("MODEL_ID"))
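
# Example trigger from the GitHub CLI. This is only an illustrative sketch:
# the filename below is an assumption, not part of this repository.
#   gh workflow run cache-hf-model.yml -f hf-model-id=<hf-repo-id>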