forked from opendatahub-io/opendatahub-tests
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathconstants.py
More file actions
67 lines (57 loc) · 2.05 KB
/
constants.py
File metadata and controls
67 lines (57 loc) · 2.05 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
from dataclasses import dataclass
from enum import Enum
from typing import List
class LlamaStackProviders:
    """Namespace grouping the LlamaStack provider identifier enums.

    Each nested enum mixes in ``str`` so members compare equal to (and
    serialize as) their plain string values.
    """

    class Inference(str, Enum):
        """Identifiers of inference providers."""

        VLLM_INFERENCE = "vllm-inference"

    class Safety(str, Enum):
        """Identifiers of safety providers."""

        TRUSTYAI_FMS = "trustyai_fms"

    class Eval(str, Enum):
        """Identifiers of evaluation providers."""

        TRUSTYAI_LMEVAL = "trustyai_lmeval"
@dataclass
class TorchTuneTestExpectation:
    """A single question/answer expectation for TorchTune documentation tests.

    Bundles a question with the keywords an acceptable answer is expected
    to contain, plus a human-readable description of the expectation.
    """

    # The question to ask about the TorchTune documentation.
    question: str
    # Keywords expected to appear in the answer.
    expected_keywords: List[str]
    # Short summary of what a correct answer should cover.
    description: str
# Fixture data for the TorchTune documentation Q&A tests, covering the
# framework itself plus the LoRA, QAT, QLoRA, and DoRA fine-tuning topics.
TORCHTUNE_TEST_EXPECTATIONS: List[TorchTuneTestExpectation] = [
    TorchTuneTestExpectation(question=question, expected_keywords=keywords, description=description)
    for question, keywords, description in (
        (
            "what is torchtune",
            ["torchtune", "pytorch", "fine-tuning", "training", "model"],
            "Should provide information about torchtune framework",
        ),
        (
            "What do you know about LoRA?",
            ["LoRA", "parameter", "efficient", "fine-tuning", "reduce"],
            "Should provide information about LoRA (Low Rank Adaptation)",
        ),
        (
            "How can I optimize model training for quantization?",
            ["Quantization-Aware Training", "QAT", "training", "fine-tuning", "fake", "quantized"],
            "Should provide information about QAT (Quantization-Aware Training)",
        ),
        (
            "Are there any memory optimizations for LoRA?",
            ["QLoRA", "fine-tuning", "4-bit", "Optimization", "LoRA"],
            "Should provide information about QLoRA",
        ),
        (
            "tell me about dora",
            ["dora", "parameter", "magnitude", "direction", "fine-tuning"],
            "Should provide information about DoRA (Weight-Decomposed Low-Rank Adaptation)",
        ),
    )
]