-
Notifications
You must be signed in to change notification settings - Fork 56
Expand file tree
/
Copy pathtrain.py
More file actions
31 lines (24 loc) · 731 Bytes
/
train.py
File metadata and controls
31 lines (24 loc) · 731 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
import os
from huggingface_hub import login, HfApi
from llama_recipes.finetuning import main as finetuning
def main():
    """Fine-tune Llama-2-7b with LoRA on a custom dataset and upload the result.

    Reads two required environment variables:
      - HUGGINGFACE_TOKEN: access token used to authenticate with the Hub.
      - HUGGINGFACE_REPO:  target model repo id for the upload.

    Side effects: logs in to the Hugging Face Hub, runs the llama-recipes
    fine-tuning entry point (writes checkpoints to ./output_dir), then
    uploads that directory as a model repo.

    Raises:
        KeyError: if either required environment variable is unset.
    """
    # Authenticate first so both the base-model download and the final
    # upload are authorized.
    login(token=os.environ["HUGGINGFACE_TOKEN"])

    # llama-recipes finetuning config: PEFT/LoRA with quantization keeps
    # the 7B model trainable on a single consumer GPU. Dotted key
    # "custom_dataset.file" points the custom_dataset loader at a local
    # script (llama-recipes' nested-config convention).
    kwargs = {
        "model_name": "meta-llama/Llama-2-7b-hf",
        "use_peft": True,
        "peft_method": "lora",
        "quantization": True,
        "batch_size_training": 2,
        "dataset": "custom_dataset",
        "custom_dataset.file": "./custom_dataset.py",
        "output_dir": "./output_dir",
    }
    finetuning(**kwargs)

    # Publish the fine-tuned adapter/checkpoints to the Hub.
    api = HfApi()
    api.upload_folder(
        folder_path="./output_dir/",
        repo_id=os.environ["HUGGINGFACE_REPO"],
        repo_type="model",
    )


if __name__ == "__main__":
    main()